From 2df236edac5efc2ffc2cabf16bf9d90a3b71d064 Mon Sep 17 00:00:00 2001
From: Roy Anger
Date: Sat, 2 Mar 2024 02:42:47 -0500
Subject: [PATCH 01/31] (wip) Started refactor, added CLI, started moving validators to their own space

---
 index.ts           | 105 +++++++++++++++------------------
 package-lock.json  |  46 +++++++++++++++
 package.json       |   2 +
 samples/auth0.json | 142 +++++++++++++++++++++++++++++++++++++++++++++
 src/cli.ts         |  65 +++++++++++++++++++++
 src/env.ts         |  30 ++++++++++
 src/spinner.ts     |   0
 src/validators.ts  |  24 ++++++++
 8 files changed, 355 insertions(+), 59 deletions(-)
 create mode 100644 samples/auth0.json
 create mode 100644 src/cli.ts
 create mode 100644 src/env.ts
 create mode 100644 src/spinner.ts
 create mode 100644 src/validators.ts

diff --git a/index.ts b/index.ts
index d8e5238..4260c70 100755
--- a/index.ts
+++ b/index.ts
@@ -1,68 +1,44 @@
 import { config } from "dotenv";
 config();
+import * as p from '@clack/prompts';
+import color from 'picocolors'
+import { setTimeout } from 'node:timers/promises';
 import * as fs from "fs";
 import * as z from "zod";
 import clerkClient from "@clerk/clerk-sdk-node";
 import ora, { Ora } from "ora";
+import { authjsUserSchema } from "./src/validators";
+import { env } from "./src/env";
+import { runCLI } from "./src/cli";
 
-const SECRET_KEY = process.env.CLERK_SECRET_KEY;
-const DELAY = parseInt(process.env.DELAY_MS ?? `1_000`);
-const RETRY_DELAY = parseInt(process.env.RETRY_DELAY_MS ?? `10_000`);
-const IMPORT_TO_DEV = process.env.IMPORT_TO_DEV_INSTANCE ?? "false";
-const OFFSET = parseInt(process.env.OFFSET ?? `0`);
-
-if (!SECRET_KEY) {
-  throw new Error(
-    "CLERK_SECRET_KEY is required. Please copy .env.example to .env and add your key."
-  );
-}
-
-if (SECRET_KEY.split("_")[1] !== "live" && IMPORT_TO_DEV === "false") {
+if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) {
   throw new Error(
     "The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV_INSTANCE' in your .env to 'true'."
   );
 }
 
-const userSchema = z.object({
-  userId: z.string(),
-  email: z.string().email(),
-  firstName: z.string().optional(),
-  lastName: z.string().optional(),
-  password: z.string().optional(),
-  passwordHasher: z
-    .enum([
-      "argon2i",
-      "argon2id",
-      "bcrypt",
-      "md5",
-      "pbkdf2_sha256",
-      "pbkdf2_sha256_django",
-      "pbkdf2_sha1",
-      "scrypt_firebase",
-    ])
-    .optional(),
-});
-
-type User = z.infer<typeof userSchema>;
+
+
+type User = z.infer<typeof authjsUserSchema>;
 
 const createUser = (userData: User) =>
   userData.password
    ?
clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - passwordDigest: userData.password, - passwordHasher: userData.passwordHasher, - }) + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + passwordDigest: userData.password, + passwordHasher: userData.passwordHasher, + }) : clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - skipPasswordRequirement: true, - }); + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + skipPasswordRequirement: true, + }); const now = new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss function appendLog(payload: any) { @@ -78,11 +54,12 @@ let alreadyExists = 0; async function processUserToClerk(userData: User, spinner: Ora) { const txt = spinner.text; try { - const parsedUserData = userSchema.safeParse(userData); + const parsedUserData = authjsUserSchema.safeParse(userData); if (!parsedUserData.success) { throw parsedUserData.error; } - await createUser(parsedUserData.data); + console.log('USER', parsedUserData.data) + // await createUser(parsedUserData.data); migrated++; } catch (error) { @@ -94,7 +71,7 @@ async function processUserToClerk(userData: User, spinner: Ora) { // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails if (error.status === 429) { - spinner.text = `${txt} - rate limit reached, waiting for ${RETRY_DELAY} ms`; + spinner.text = `${txt} - rate limit reached, waiting for ${env.RETRY_DELAY_MS} ms`; await rateLimitCooldown(); spinner.text = txt; return processUserToClerk(userData, spinner); @@ -105,14 +82,14 @@ async function processUserToClerk(userData: User, spinner: Ora) { } async function cooldown() { - await new Promise((r) => setTimeout(r, DELAY)); + await new Promise((r) => setTimeout(r, env.DELAY)); } async function rateLimitCooldown() { - await new Promise((r) => setTimeout(r, RETRY_DELAY)); + await new Promise((r) => setTimeout(r, env.RETRY_DELAY_MS)); } -async function main() { +async function mainOld() { console.log(`Clerk User Migration Utility`); const inputFileName = process.argv[2] ?? 
"users.json"; @@ -122,9 +99,9 @@ async function main() { const parsedUserData: any[] = JSON.parse( fs.readFileSync(inputFileName, "utf-8") ); - const offsetUsers = parsedUserData.slice(OFFSET); + const offsetUsers = parsedUserData.slice(env.DELAY); console.log( - `users.json found and parsed, attempting migration with an offset of ${OFFSET}` + `users.json found and parsed, attempting migration with an offset of ${env.OFFSET}` ); let i = 0; @@ -142,7 +119,17 @@ async function main() { return; } -main().then(() => { - console.log(`${migrated} users migrated`); - console.log(`${alreadyExists} users failed to upload`); -}); + + +async function main() { + + const args = await runCLI() + + console.log('PARAMS', args) +} + + + + + +main() diff --git a/package-lock.json b/package-lock.json index e50a911..fd2151c 100644 --- a/package-lock.json +++ b/package-lock.json @@ -9,16 +9,52 @@ "version": "0.0.1", "license": "ISC", "dependencies": { + "@clack/prompts": "^0.7.0", "@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", "dotenv": "^16.3.1", "ora": "^7.0.1", + "picocolors": "^1.0.0", "zod": "^3.22.4" }, "bin": { "clerk-user-migration": "index.ts" } }, + "node_modules/@clack/core": { + "version": "0.3.4", + "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.3.4.tgz", + "integrity": "sha512-H4hxZDXgHtWTwV3RAVenqcC4VbJZNegbBjlPvzOzCouXtS2y3sDvlO3IsbrPNWuLWPPlYVYPghQdSF64683Ldw==", + "dependencies": { + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@clack/prompts": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.7.0.tgz", + "integrity": "sha512-0MhX9/B4iL6Re04jPrttDm+BsP8y6mS7byuv0BvXgdXhbV5PdlsHt55dvNsuBCPZ7xq1oTAOOuotR9NFbQyMSA==", + "bundleDependencies": [ + "is-unicode-supported" + ], + "dependencies": { + "@clack/core": "^0.3.3", + "is-unicode-supported": "*", + "picocolors": "^1.0.0", + "sisteransi": "^1.0.5" + } + }, + "node_modules/@clack/prompts/node_modules/is-unicode-supported": { + "version": "1.3.0", + "inBundle": true, + "license": "MIT", + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, "node_modules/@clerk/backend": { "version": "0.34.1", "resolved": "https://registry.npmjs.org/@clerk/backend/-/backend-0.34.1.tgz", @@ -772,6 +808,11 @@ "url": "https://github.com/sponsors/sindresorhus" } }, + "node_modules/picocolors": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", + "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" + }, "node_modules/pvtsutils": { "version": "1.3.5", "resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz", @@ -859,6 +900,11 @@ "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" }, + "node_modules/sisteransi": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", + "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" + }, "node_modules/snake-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", diff --git a/package.json b/package.json index 060996b..5f1167d 100644 --- a/package.json +++ b/package.json @@ -10,10 +10,12 @@ "start": "bun index.ts" }, "dependencies": { + "@clack/prompts": "^0.7.0", 
"@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", "dotenv": "^16.3.1", "ora": "^7.0.1", + "picocolors": "^1.0.0", "zod": "^3.22.4" } } diff --git a/samples/auth0.json b/samples/auth0.json new file mode 100644 index 0000000..62a4460 --- /dev/null +++ b/samples/auth0.json @@ -0,0 +1,142 @@ +[ + { + "_id":{ + "$oid":"6573765d9fa97e13efcc3221" + }, + "email":"janedoe@clerk.dev", + "username":"janedoe", + "email_verified":false, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$OW1kjlVtGbGk1fbKG1TQeupVc9RyrA1gA4c8NN1uCNzyxMIA7EN.u", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"janedoe@clerk.dev", + "verified":false + }, + { + "type":"username", + "value":"janedoe" + } + ], + "last_password_reset":{ + "$date":"2023-12-08T20:44:31.608Z" + } + }, + { + "_id":{ + "$oid":"657353cd18710d662aeb4e9e" + }, + "email":"johndoe@clerk.dev", + "username":"johndoe", + "email_verified":true, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$o1bU5mlWpsft6RQFZeCfh.6.ixhdeH7fdfJCm2U1g.XX4Ojnxc3Hm", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"johndoe@clerk.dev", + "verified":true + }, + { + "type":"username", + "value":"johndoe" + } + ] + }, + { + "_id":{ + "$oid":"657250b0d60f4fff8f69198a" + }, + "email":"janehancock@clerk.dev", + "email_verified":false, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$w51uK4SH.5rPhFvb0zvOQ.MUGYPURPIThya9RriGMoPVtIl4KVycS", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"janehancock@clerk.dev", + "verified":false + } + ] + }, + { + "_id":{ + "$oid":"6573d4d69fa97e13efcca49f" + }, + "email":"johnhancock@clerk.com", + "username":"johnhancock", + "email_verified":true, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$qQiiDhcEm3krRmTj9a2lb.Q4M4W/dkVFQUm/aj1jNxWljt0HSNecK", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"johnhancock@clerk.com", + "verified":true + }, + { + "type":"username", + "value":"johnhancock" + } + ] + }, + { + "_id":{ + "$oid":"6573813ce94488fb5f75e089" + }, + "email":"elmo@clerk.dev", + "username":"elmo", + "email_verified":true, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$4a8p79G/F11ZWS3/NGOf9eP9ExnXb0EGZf2FUPB5Wc0pzEoHQM3g.", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"elmo@clerk.dev", + "verified":true + }, + { + "type":"username", + "value":"elmo" + } + ] + }, + { + "_id":{ + "$oid":"6572b8339fa97e13efcb57d1" + }, + "email":"kermitthefrog@gmail.com", + "email_verified":false, + "tenant":"dev-5b88se1iuijo6w1e", + "connection":"Username-Password-Authentication", + "passwordHash":"$2b$10$sWOjJ1dp8tG/5BrSZcAwce1UAca4gJkZShYcBg1CdmW/BLc8HueJO", + "_tmp_is_unique":true, + "version":"1.1", + "identifiers":[ + { + "type":"email", + "value":"kermitthefrog@gmail.com", + "verified":false + } + ], + "last_password_reset":{ + "$date":"2023-12-08T23:14:58.161Z" + } + } +] diff --git a/src/cli.ts b/src/cli.ts new file mode 100644 index 0000000..8b0fdcd --- /dev/null +++ b/src/cli.ts @@ -0,0 +1,65 @@ + +import * as p from '@clack/prompts' +import color from 'picocolors' + + +export const runCLI = async () => { + 
+  p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`)
+
+  const args = await p.group(
+    {
+      source: () =>
+        p.select({
+          message: 'What platform are you migrating your users from?',
+          initialValue: 'authjs',
+          maxItems: 1,
+          options: [
+            { value: 'authjs', label: 'Auth.js (Next-Auth)' },
+            { value: 'auth0', label: 'Auth0' },
+            { value: 'supabase', label: 'Supabase' }
+          ]
+        }),
+      file: () =>
+        p.text({
+          message: 'Specify the file to use for importing your users',
+          initialValue: './users.json',
+          placeholder: './users.json'
+        }),
+      instance: () =>
+        p.select({
+          message: 'Are you importing your users into a production instance? You should only import into a development instance for testing',
+          initialValue: 'prod',
+          maxItems: 1,
+          options: [
+            { value: 'prod', label: 'Production' },
+            { value: 'dev', label: 'Development' }
+          ]
+        }),
+      offset: () =>
+        p.text({
+          message: 'Specify an offset to begin importing from.',
+          defaultValue: '0',
+          placeholder: '0'
+        }),
+      begin: () =>
+        p.confirm({
+          message: 'Begin Migration?',
+          initialValue: true,
+        }),
+    },
+    {
+      onCancel: () => {
+        p.cancel('Migration cancelled.');
+        process.exit(0);
+      },
+    }
+  )
+
+  if (args.begin) {
+    console.log('Migration started')
+  }
+
+
+  return args
+
+}
diff --git a/src/env.ts b/src/env.ts
new file mode 100644
index 0000000..5f2e052
--- /dev/null
+++ b/src/env.ts
@@ -0,0 +1,30 @@
+
+import { TypeOf, z } from 'zod'
+require('dotenv').config()
+
+// TODO: Revisit if we need this. Left to easily implement
+export const withDevDefault = <T extends z.ZodTypeAny>(
+  schema: T,
+  val: TypeOf<T>,
+) => (process.env['NODE_ENV'] !== 'production' ? schema.default(val) : schema)
+
+const envSchema = z.object({
+  CLERK_SECRET_KEY: z.string(),
+  DELAY: z.coerce.number().optional().default(550),
+  RETRY_DELAY_MS: z.coerce.number().optional().default(10000),
+  OFFSET: z.coerce.number().optional().default(0),
+  IMPORT_TO_DEV: z.coerce.boolean().optional().default(false)
+})
+
+const parsed = envSchema.safeParse(process.env)
+
+if (!parsed.success) {
+  console.error(
+    '❌ Invalid environment variables:',
+    JSON.stringify(parsed.error.format(), null, 4),
+  )
+  process.exit(1)
+}
+
+export const env = parsed.data
+
diff --git a/src/spinner.ts b/src/spinner.ts
new file mode 100644
index 0000000..e69de29
diff --git a/src/validators.ts b/src/validators.ts
new file mode 100644
index 0000000..99cfd03
--- /dev/null
+++ b/src/validators.ts
@@ -0,0 +1,24 @@
+
+import * as z from "zod";
+
+
+export const authjsUserSchema = z.object({
+  userId: z.string(),
+  email: z.string().email(),
+  firstName: z.string().optional(),
+  lastName: z.string().optional(),
+  password: z.string().optional(),
+  passwordHasher: z
+    .enum([
+      "argon2i",
+      "argon2id",
+      "bcrypt",
+      "md5",
+      "pbkdf2_sha256",
+      "pbkdf2_sha256_django",
+      "pbkdf2_sha1",
+      "scrypt_firebase",
+    ])
+    .optional(),
+});
+

From ee654d04606ae51300490117b6173b1b6362fc8c Mon Sep 17 00:00:00 2001
From: Roy Anger
Date: Sat, 2 Mar 2024 03:01:16 -0500
Subject: [PATCH 02/31] (wip) Added Supabase validator

---
 src/validators.ts | 21 +++++++++++++++++++++
 1 file changed, 21 insertions(+)

diff --git a/src/validators.ts b/src/validators.ts
index 99cfd03..b8b918a 100644
--- a/src/validators.ts
+++ b/src/validators.ts
@@ -22,3 +22,24 @@ export const authjsUserSchema = z.object({
     .optional(),
 });
 
+
+export const supabaseUserSchema = z.object({
+  id: z.string(),
+  email: z.string().email(),
+  firstName: z.string().optional(),
+  lastName: z.string().optional(),
+  encrypted_password: z.string().optional(),
+  passwordHasher:
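+    // Assumption worth noting: Supabase exports typically carry bcrypt digests
+    // in encrypted_password, which is why the hasher below defaults to 'bcrypt'
+    // instead of being optional like the Auth.js schema above.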
z + .enum([ + "argon2i", + "argon2id", + "bcrypt", + "md5", + "pbkdf2_sha256", + "pbkdf2_sha256_django", + "pbkdf2_sha1", + "scrypt_firebase", + ]) + .default('bcrypt'), +}); + From 58fe501e66b6930d1c148d15607217f250d95bc8 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sat, 2 Mar 2024 03:57:28 -0500 Subject: [PATCH 03/31] (wip) Check if the file exists --- index.ts | 11 +++-------- package-lock.json | 39 +++++++++++++++++++++++++++++++++++++++ package.json | 6 ++++++ src/cli.ts | 12 +++++++++--- src/functions.ts | 19 +++++++++++++++++++ 5 files changed, 76 insertions(+), 11 deletions(-) create mode 100644 src/functions.ts diff --git a/index.ts b/index.ts index 4260c70..009624d 100755 --- a/index.ts +++ b/index.ts @@ -11,6 +11,7 @@ import ora, { Ora } from "ora"; import { authjsUserSchema } from "./src/validators"; import { env } from "./src/env"; import { runCLI } from "./src/cli"; +import { checkFileType } from "./src/functions"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -90,15 +91,7 @@ async function rateLimitCooldown() { } async function mainOld() { - console.log(`Clerk User Migration Utility`); - const inputFileName = process.argv[2] ?? "users.json"; - - console.log(`Fetching users from ${inputFileName}`); - - const parsedUserData: any[] = JSON.parse( - fs.readFileSync(inputFileName, "utf-8") - ); const offsetUsers = parsedUserData.slice(env.DELAY); console.log( `users.json found and parsed, attempting migration with an offset of ${env.OFFSET}` @@ -126,6 +119,8 @@ async function main() { const args = await runCLI() console.log('PARAMS', args) + + checkFileType(args.file) } diff --git a/package-lock.json b/package-lock.json index fd2151c..4d83677 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,13 +12,19 @@ "@clack/prompts": "^0.7.0", "@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", + "csv-parse": "^5.5.5", + "csv-parser": "^3.0.0", "dotenv": "^16.3.1", + "mime-types": "^2.1.35", "ora": "^7.0.1", "picocolors": "^1.0.0", "zod": "^3.22.4" }, "bin": { "clerk-user-migration": "index.ts" + }, + "devDependencies": { + "@types/mime-types": "^2.1.4" } }, "node_modules/@clack/core": { @@ -334,6 +340,12 @@ "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==" }, + "node_modules/@types/mime-types": { + "version": "2.1.4", + "resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz", + "integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==", + "dev": true + }, "node_modules/@types/node": { "version": "16.18.6", "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.6.tgz", @@ -563,6 +575,25 @@ "node": ">= 0.6" } }, + "node_modules/csv-parse": { + "version": "5.5.5", + "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.5.5.tgz", + "integrity": "sha512-erCk7tyU3yLWAhk6wvKxnyPtftuy/6Ak622gOO7BCJ05+TYffnPCJF905wmOQm+BpkX54OdAl8pveJwUdpnCXQ==" + }, + "node_modules/csv-parser": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz", + "integrity": "sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ==", + "dependencies": { + "minimist": "^1.2.0" + }, + "bin": { + "csv-parser": "bin/csv-parser" + }, + "engines": { + "node": ">= 10" + } + }, "node_modules/deepmerge": { "version": "4.2.2", "resolved": 
"https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", @@ -758,6 +789,14 @@ "node": ">=6" } }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, "node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", diff --git a/package.json b/package.json index 5f1167d..d27d550 100644 --- a/package.json +++ b/package.json @@ -13,9 +13,15 @@ "@clack/prompts": "^0.7.0", "@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", + "csv-parse": "^5.5.5", + "csv-parser": "^3.0.0", "dotenv": "^16.3.1", + "mime-types": "^2.1.35", "ora": "^7.0.1", "picocolors": "^1.0.0", "zod": "^3.22.4" + }, + "devDependencies": { + "@types/mime-types": "^2.1.4" } } diff --git a/src/cli.ts b/src/cli.ts index 8b0fdcd..ed9ecf2 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,6 +1,7 @@ import * as p from '@clack/prompts' import color from 'picocolors' +import { checkIfFileExists } from './functions' export const runCLI = async () => { @@ -22,12 +23,17 @@ export const runCLI = async () => { file: () => p.text({ message: 'Specify the file to use for importing your users', - initialValue: './users.json', - placeholder: './users.json' + initialValue: 'users.json', + placeholder: 'users.json', + validate: (value) => { + if (!checkIfFileExists(value)) { + return "That file does not exist. Please try again" + } + } }), instance: () => p.select({ - message: 'Are you importing your users into a production instance? You should only import into a development instance for testing', + message: 'Are you importing your users into a production instance? You should only import into a development instance for testing. Development instances are limited to 500 users and do not share their userbase with production instances. 
', initialValue: 'prod', maxItems: 1, options: [ diff --git a/src/functions.ts b/src/functions.ts new file mode 100644 index 0000000..8f2f2c1 --- /dev/null +++ b/src/functions.ts @@ -0,0 +1,19 @@ +import mime from 'mime-types' +import fs from 'fs'; +import path from 'path' + +const createFilePath = (file: string) => { + return path.join(__dirname, '..', file) +} + +export const checkIfFileExists = (file: string) => { + console.log('file', file) + if (fs.existsSync(createFilePath(file))) { + console.log('exist') + return true + } + else { + console.log('does not exist') + return false + } +} From bc86a10bb0fb5aa9c0c895e954a23314a7a864ec Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sat, 2 Mar 2024 04:16:57 -0500 Subject: [PATCH 04/31] (wip) Reading from .json, and from .csv into JSON --- .gitignore | 4 ++-- index.ts | 6 ++++-- src/cli.ts | 5 ++++- src/functions.ts | 36 ++++++++++++++++++++++++++++++++++++ 4 files changed, 46 insertions(+), 5 deletions(-) diff --git a/.gitignore b/.gitignore index ddff63b..2ab29df 100644 --- a/.gitignore +++ b/.gitignore @@ -1,8 +1,8 @@ node_modules .env users.json -migration-log.json -bun.lockb +users.csv package-lock.json yarn.lock pnpm-lock.yaml +logs diff --git a/index.ts b/index.ts index 009624d..9596669 100755 --- a/index.ts +++ b/index.ts @@ -11,7 +11,7 @@ import ora, { Ora } from "ora"; import { authjsUserSchema } from "./src/validators"; import { env } from "./src/env"; import { runCLI } from "./src/cli"; -import { checkFileType } from "./src/functions"; +import { checkFileType, loadUsersFromFile } from "./src/functions"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -120,7 +120,9 @@ async function main() { console.log('PARAMS', args) - checkFileType(args.file) + const users = await loadUsersFromFile(args.file) + + console.log(users) } diff --git a/src/cli.ts b/src/cli.ts index ed9ecf2..7f1f29b 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,7 +1,7 @@ import * as p from '@clack/prompts' import color from 'picocolors' -import { checkIfFileExists } from './functions' +import { checkIfFileExists, getFileType } from './functions' export const runCLI = async () => { @@ -29,6 +29,9 @@ export const runCLI = async () => { if (!checkIfFileExists(value)) { return "That file does not exist. 
Please try again" } + if (getFileType(value) !== 'text/csv' && getFileType(value) !== 'application/json') { + return 'Please supply a valid JSON or CSV file' + } } }), instance: () => diff --git a/src/functions.ts b/src/functions.ts index 8f2f2c1..d1116ea 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -1,6 +1,8 @@ import mime from 'mime-types' import fs from 'fs'; import path from 'path' +import csvParser from 'csv-parser'; + const createFilePath = (file: string) => { return path.join(__dirname, '..', file) @@ -8,6 +10,7 @@ const createFilePath = (file: string) => { export const checkIfFileExists = (file: string) => { console.log('file', file) + if (fs.existsSync(createFilePath(file))) { console.log('exist') return true @@ -17,3 +20,36 @@ export const checkIfFileExists = (file: string) => { return false } } + +export const getFileType = (file: string) => { + return mime.lookup(createFilePath(file)) +} + + +export const loadUsersFromFile = async (file: string) => { + + const type = getFileType(createFilePath(file)) + if (type === "text/csv") { + + const users = [{}]; + return new Promise((resolve, reject) => { + fs.createReadStream(createFilePath(file)) + .pipe(csvParser()) + .on('data', (data) => users.push(data)) + .on('error', (err) => reject(err)) + .on('end', () => { + resolve(users) + }) + }); + } else { + + // TODO: Can we deal with the any here? + const users: any[] = JSON.parse( + fs.readFileSync(createFilePath(file), "utf-8") + ); + + return users + } + +} + From 335b642f533780f1e6949dae017a8f67ae4ef84c Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sat, 2 Mar 2024 13:22:59 -0500 Subject: [PATCH 05/31] (wip) Moved validator list to constant, generate CLI options on demand from that constant --- index.ts | 6 +++--- src/cli.ts | 16 ++++++++-------- src/{env.ts => envs-constants.ts} | 8 ++++++++ src/functions.ts | 17 +++++++++++++++++ 4 files changed, 36 insertions(+), 11 deletions(-) rename src/{env.ts => envs-constants.ts} (76%) diff --git a/index.ts b/index.ts index 9596669..ea4fe86 100755 --- a/index.ts +++ b/index.ts @@ -9,9 +9,9 @@ import * as z from "zod"; import clerkClient from "@clerk/clerk-sdk-node"; import ora, { Ora } from "ora"; import { authjsUserSchema } from "./src/validators"; -import { env } from "./src/env"; +import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; -import { checkFileType, loadUsersFromFile } from "./src/functions"; +import { loadUsersFromFile } from "./src/functions"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -122,7 +122,7 @@ async function main() { const users = await loadUsersFromFile(args.file) - console.log(users) + } diff --git a/src/cli.ts b/src/cli.ts index 7f1f29b..a0f6a4b 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,24 +1,23 @@ import * as p from '@clack/prompts' import color from 'picocolors' -import { checkIfFileExists, getFileType } from './functions' +import { checkIfFileExists, createValidatorOptions, getFileType } from './functions' +import { VALIDATORS } from './envs-constants' export const runCLI = async () => { p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`) + const options = createValidatorOptions() + const args = await p.group( { source: () => p.select({ message: 'What platform are you migrating your users from?', - initialValue: 'authjs', + initialValue: options[0].value, maxItems: 1, - options: [ - { value: 'authjs', label: 'Auth.js (Next-Auth)' }, - { value: 'auth0', label: 'Auth0' }, - { value: 
'supabase', label: 'Supabase' } - ] + options: options }), file: () => p.text({ @@ -36,7 +35,7 @@ export const runCLI = async () => { }), instance: () => p.select({ - message: 'Are you importing your users into a production instance? You should only import into a development instance for testing. Development instances are limited to 500 users and do not share their userbase with production instances. ', + message: 'Are you importing your users into a production instance? Development instances are for testing and limited t0 500 users.', initialValue: 'prod', maxItems: 1, options: [ @@ -72,3 +71,4 @@ export const runCLI = async () => { return args } + diff --git a/src/env.ts b/src/envs-constants.ts similarity index 76% rename from src/env.ts rename to src/envs-constants.ts index 5f2e052..44ad460 100644 --- a/src/env.ts +++ b/src/envs-constants.ts @@ -28,3 +28,11 @@ if (!parsed.success) { export const env = parsed.data + +export const VALIDATORS = [ + { value: 'authjs', label: 'Auth.js (Next-Auth)', schema: 'authjsUseerSchema' }, + { value: 'auth0', label: 'Auth0', schema: 'authoUserSchema' }, + { value: 'supabase', label: 'Supabase', schema: 'supabaseUserSchems' } + +] + diff --git a/src/functions.ts b/src/functions.ts index d1116ea..bccb659 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -2,6 +2,8 @@ import mime from 'mime-types' import fs from 'fs'; import path from 'path' import csvParser from 'csv-parser'; +import { VALIDATORS } from './envs-constants'; +// import { Option } from '@clack/prompts'; const createFilePath = (file: string) => { @@ -50,6 +52,21 @@ export const loadUsersFromFile = async (file: string) => { return users } +} +// emulate what Clack expects for an option in a Select / MultiSelect +export type OptionType = { + value: string; + label: string | undefined; + hint?: string | undefined; } +export const createValidatorOptions = () => { + const options: OptionType[] = []; + + for (const validator of VALIDATORS) { + options.push({ "value": validator.value, "label": validator.label }) + } + + return options +} From 5a28fc69f881c0dd505c3ad61f360e705ab35d91 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sat, 2 Mar 2024 14:06:10 -0500 Subject: [PATCH 06/31] (wip) Moved validators into their own directory, added metadata to each file, generated list from files --- index.ts | 3 +- src/cli.ts | 4 +- src/envs-constants.ts | 37 ++++++++++++++++--- src/functions.ts | 15 ++++++++ src/validators/authjsValidator.ts | 30 +++++++++++++++ .../supabaseValidator.ts} | 27 +++----------- 6 files changed, 85 insertions(+), 31 deletions(-) create mode 100644 src/validators/authjsValidator.ts rename src/{validators.ts => validators/supabaseValidator.ts} (52%) diff --git a/index.ts b/index.ts index ea4fe86..7c04b36 100755 --- a/index.ts +++ b/index.ts @@ -8,7 +8,7 @@ import * as fs from "fs"; import * as z from "zod"; import clerkClient from "@clerk/clerk-sdk-node"; import ora, { Ora } from "ora"; -import { authjsUserSchema } from "./src/validators"; +import { authjsUserSchema } from "./src/validators/authjsValidator"; import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; import { loadUsersFromFile } from "./src/functions"; @@ -123,6 +123,7 @@ async function main() { const users = await loadUsersFromFile(args.file) + } diff --git a/src/cli.ts b/src/cli.ts index a0f6a4b..7ac797b 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,14 +1,14 @@ import * as p from '@clack/prompts' import color from 'picocolors' -import { checkIfFileExists, createValidatorOptions, 
getFileType } from './functions' +import { authjsFirstSort, checkIfFileExists, createValidatorOptions, getFileType } from './functions' import { VALIDATORS } from './envs-constants' export const runCLI = async () => { p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`) - const options = createValidatorOptions() + const options = createValidatorOptions().sort(authjsFirstSort) const args = await p.group( { diff --git a/src/envs-constants.ts b/src/envs-constants.ts index 44ad460..7b7ec27 100644 --- a/src/envs-constants.ts +++ b/src/envs-constants.ts @@ -1,6 +1,10 @@ import { TypeOf, z } from 'zod' -require('dotenv').config() +import * as fs from 'fs'; +import * as path from 'path'; +import { config } from "dotenv"; +config(); +// require('dotenv').config() // TODO: Revisit if we need this. Left to easily implement export const withDevDefault = ( @@ -29,10 +33,31 @@ if (!parsed.success) { export const env = parsed.data -export const VALIDATORS = [ - { value: 'authjs', label: 'Auth.js (Next-Auth)', schema: 'authjsUseerSchema' }, - { value: 'auth0', label: 'Auth0', schema: 'authoUserSchema' }, - { value: 'supabase', label: 'Supabase', schema: 'supabaseUserSchems' } +// Dynamically read what validators are present and generate array for use in script -] +type Validator = { + value: string; + label: string; + schema: string; +}; +// +const validatorsDirectory = path.join(__dirname, '/validators'); +export const VALIDATORS: Validator[] = []; +const files = fs.readdirSync(validatorsDirectory); + + +files.forEach((file) => { + if (file.endsWith('.ts')) { + const filePath = path.join(validatorsDirectory, file); + const validatorModule = require(filePath); // Use `require` for dynamic imports in Node.js + + if (validatorModule.options && validatorModule.options.value && validatorModule.options.schema) { + VALIDATORS.push({ + value: validatorModule.options.value, + label: validatorModule.options.label || '', + schema: validatorModule.options.schema, + }); + } + } +}); diff --git a/src/functions.ts b/src/functions.ts index bccb659..08b8909 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -70,3 +70,18 @@ export const createValidatorOptions = () => { return options } + +// export const selectSchema (selectedSchema:string) => { +// +// } +// + + +export const authjsFirstSort = (a: any, b: any): number => { + // If 'authjs' is present in either 'a' or 'b', prioritize it + if (a.value === 'authjs') return -1; + if (b.value === 'authjs') return 1; + + // Otherwise, maintain the original order + return 0; +}; diff --git a/src/validators/authjsValidator.ts b/src/validators/authjsValidator.ts new file mode 100644 index 0000000..d20f820 --- /dev/null +++ b/src/validators/authjsValidator.ts @@ -0,0 +1,30 @@ +import * as z from "zod"; + +export const options = { + value: 'authjs', + label: 'Authjs (Next-Auth)', + schema: 'authjsUserSchema' +} + +export const authjsUserSchema = z.object({ + userId: z.string(), + email: z.string().email(), + firstName: z.string().optional(), + lastName: z.string().optional(), + password: z.string().optional(), + passwordHasher: z + .enum([ + "argon2i", + "argon2id", + "bcrypt", + "md5", + "pbkdf2_sha256", + "pbkdf2_sha256_django", + "pbkdf2_sha1", + "scrypt_firebase", + ]) + .optional(), +}); + + + diff --git a/src/validators.ts b/src/validators/supabaseValidator.ts similarity index 52% rename from src/validators.ts rename to src/validators/supabaseValidator.ts index b8b918a..1654496 100644 --- a/src/validators.ts +++ b/src/validators/supabaseValidator.ts @@ 
-1,27 +1,10 @@ - import * as z from "zod"; - -export const authjsUserSchema = z.object({ - userId: z.string(), - email: z.string().email(), - firstName: z.string().optional(), - lastName: z.string().optional(), - password: z.string().optional(), - passwordHasher: z - .enum([ - "argon2i", - "argon2id", - "bcrypt", - "md5", - "pbkdf2_sha256", - "pbkdf2_sha256_django", - "pbkdf2_sha1", - "scrypt_firebase", - ]) - .optional(), -}); - +export const options = { + value: 'supabase', + label: 'Supabase', + schema: 'supabaseUserSchema' +} export const supabaseUserSchema = z.object({ id: z.string(), From a43625b6dc2fde6c9ba301e6f3d05d76ab5be8a4 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sat, 2 Mar 2024 15:41:43 -0500 Subject: [PATCH 07/31] (wip) Minor improvements,some more typing --- index.ts | 22 +++++++++++++++------- package.json | 1 - src/cli.ts | 2 -- src/functions.ts | 27 +++++++++++++++++++++------ src/validators/authjsValidator.ts | 4 ++-- src/validators/supabaseValidator.ts | 3 ++- 6 files changed, 40 insertions(+), 19 deletions(-) diff --git a/index.ts b/index.ts index 7c04b36..f957925 100755 --- a/index.ts +++ b/index.ts @@ -5,13 +5,14 @@ import color from 'picocolors' import { setTimeout } from 'node:timers/promises'; import * as fs from "fs"; +import * as path from 'path'; import * as z from "zod"; import clerkClient from "@clerk/clerk-sdk-node"; import ora, { Ora } from "ora"; -import { authjsUserSchema } from "./src/validators/authjsValidator"; +import authjsUserSchema from "./src/validators/authjsValidator"; import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; -import { loadUsersFromFile } from "./src/functions"; +import { loadUsersFromFile, loadValidator } from "./src/functions"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -92,10 +93,6 @@ async function rateLimitCooldown() { async function mainOld() { - const offsetUsers = parsedUserData.slice(env.DELAY); - console.log( - `users.json found and parsed, attempting migration with an offset of ${env.OFFSET}` - ); let i = 0; const spinner = ora(`Migrating users`).start(); @@ -120,9 +117,20 @@ async function main() { console.log('PARAMS', args) - const users = await loadUsersFromFile(args.file) + const userSchema = loadValidator(args.source) + type User = z.infer; + + + console.log(userSchema) + + + const users = await loadUsersFromFile(args.file, args.source) + + + const usersToImport = users.slice(parseInt(args.offset) > env.OFFSET ? 
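+    // whichever offset is larger wins: the value passed on the CLI or OFFSET from .env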
parseInt(args.offset) : env.OFFSET); + importUsers(usersToImport, userSchema, args) } diff --git a/package.json b/package.json index d27d550..7fbbeed 100644 --- a/package.json +++ b/package.json @@ -13,7 +13,6 @@ "@clack/prompts": "^0.7.0", "@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", - "csv-parse": "^5.5.5", "csv-parser": "^3.0.0", "dotenv": "^16.3.1", "mime-types": "^2.1.35", diff --git a/src/cli.ts b/src/cli.ts index 7ac797b..95bd695 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -2,8 +2,6 @@ import * as p from '@clack/prompts' import color from 'picocolors' import { authjsFirstSort, checkIfFileExists, createValidatorOptions, getFileType } from './functions' -import { VALIDATORS } from './envs-constants' - export const runCLI = async () => { p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`) diff --git a/src/functions.ts b/src/functions.ts index 08b8909..7df953e 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -3,6 +3,7 @@ import fs from 'fs'; import path from 'path' import csvParser from 'csv-parser'; import { VALIDATORS } from './envs-constants'; +import * as z from "zod"; // import { Option } from '@clack/prompts'; @@ -28,8 +29,11 @@ export const getFileType = (file: string) => { } -export const loadUsersFromFile = async (file: string) => { +export const loadUsersFromFile = async (file: string, source: string) => { + // const userSchema = loadValidator(source) + // type User = z.infer; + // const type = getFileType(createFilePath(file)) if (type === "text/csv") { @@ -46,7 +50,7 @@ export const loadUsersFromFile = async (file: string) => { } else { // TODO: Can we deal with the any here? - const users: any[] = JSON.parse( + const users = JSON.parse( fs.readFileSync(createFilePath(file), "utf-8") ); @@ -71,10 +75,21 @@ export const createValidatorOptions = () => { return options } -// export const selectSchema (selectedSchema:string) => { -// -// } -// +export const loadValidator = (validatorName: string) => { + const validatorsDirectory = path.join(__dirname, 'validators'); + + const filePath = path.join(validatorsDirectory, `${validatorName}Validator`); + const validatorModule = require(filePath); + + const userSchema = validatorModule.default; + + console.log(`Imported:`, userSchema); + + return userSchema + + +} + export const authjsFirstSort = (a: any, b: any): number => { diff --git a/src/validators/authjsValidator.ts b/src/validators/authjsValidator.ts index d20f820..0bed838 100644 --- a/src/validators/authjsValidator.ts +++ b/src/validators/authjsValidator.ts @@ -6,7 +6,7 @@ export const options = { schema: 'authjsUserSchema' } -export const authjsUserSchema = z.object({ +const authjsUserSchema = z.object({ userId: z.string(), email: z.string().email(), firstName: z.string().optional(), @@ -26,5 +26,5 @@ export const authjsUserSchema = z.object({ .optional(), }); - +export default authjsUserSchema diff --git a/src/validators/supabaseValidator.ts b/src/validators/supabaseValidator.ts index 1654496..7bb4c9a 100644 --- a/src/validators/supabaseValidator.ts +++ b/src/validators/supabaseValidator.ts @@ -6,7 +6,7 @@ export const options = { schema: 'supabaseUserSchema' } -export const supabaseUserSchema = z.object({ +const supabaseUserSchema = z.object({ id: z.string(), email: z.string().email(), firstName: z.string().optional(), @@ -26,3 +26,4 @@ export const supabaseUserSchema = z.object({ .default('bcrypt'), }); +export default supabaseUserSchema From bdad2f659cfb39c924209d2940c8383e348d70c9 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 
Mar 2024 03:05:38 -0500 Subject: [PATCH 08/31] (wip) Refactored to transform incoming data to expected schema --- index.ts | 169 +++++++++++-------------- package-lock.json | 36 +++++- package.json | 1 + samples/clerk.csv | 5 + src/cli.ts | 9 +- src/envs-constants.ts | 28 ----- src/functions.ts | 183 ++++++++++++++++++++-------- src/handlers/authjsHandler.ts | 14 +++ src/handlers/clerkHandler.ts | 15 +++ src/handlers/supabaseHandler.ts | 12 ++ src/validators/authjsValidator.ts | 30 ----- src/validators/supabaseValidator.ts | 29 ----- 12 files changed, 281 insertions(+), 250 deletions(-) create mode 100644 samples/clerk.csv create mode 100644 src/handlers/authjsHandler.ts create mode 100644 src/handlers/clerkHandler.ts create mode 100644 src/handlers/supabaseHandler.ts delete mode 100644 src/validators/authjsValidator.ts delete mode 100644 src/validators/supabaseValidator.ts diff --git a/index.ts b/index.ts index f957925..6150bfa 100755 --- a/index.ts +++ b/index.ts @@ -1,5 +1,6 @@ import { config } from "dotenv"; config(); + import * as p from '@clack/prompts'; import color from 'picocolors' import { setTimeout } from 'node:timers/promises'; @@ -7,12 +8,13 @@ import { setTimeout } from 'node:timers/promises'; import * as fs from "fs"; import * as path from 'path'; import * as z from "zod"; -import clerkClient from "@clerk/clerk-sdk-node"; +import clerkClient, { User } from "@clerk/clerk-sdk-node"; import ora, { Ora } from "ora"; -import authjsUserSchema from "./src/validators/authjsValidator"; import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; import { loadUsersFromFile, loadValidator } from "./src/functions"; +import { importUsers } from "./src/import-users"; +import authjsUserSchema from "./src/transformers/authjsTransfomer"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -21,116 +23,85 @@ if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false } +// +// type User = z.infer; +// +// const createUser = (userData: User) => +// userData.password +// ? 
clerkClient.users.createUser({ +// externalId: userData.userId, +// emailAddress: [userData.email], +// firstName: userData.firstName, +// lastName: userData.lastName, +// passwordDigest: userData.password, +// passwordHasher: userData.passwordHasher, +// }) +// : clerkClient.users.createUser({ +// externalId: userData.userId, +// emailAddress: [userData.email], +// firstName: userData.firstName, +// lastName: userData.lastName, +// skipPasswordRequirement: true, +// }); +// +// const now = new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss +// function appendLog(payload: any) { +// fs.appendFileSync( +// `./migration-log-${now}.json`, +// `\n${JSON.stringify(payload, null, 2)}` +// ); +// } +// let migrated = 0; +// let alreadyExists = 0; +// +// async function processUserToClerk(userData: User, spinner: Ora) { +// const txt = spinner.text; +// try { +// const parsedUserData = authjsUserSchema.safeParse(userData); +// if (!parsedUserData.success) { +// throw parsedUserData.error; +// } +// console.log('USER', parsedUserData.data) +// // await createUser(parsedUserData.data); +// +// migrated++; +// } catch (error) { +// if (error.status === 422) { +// appendLog({ userId: userData.userId, ...error }); +// alreadyExists++; +// return; +// } +// +// // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails +// if (error.status === 429) { +// spinner.text = `${txt} - rate limit reached, waiting for ${env.RETRY_DELAY_MS} ms`; +// await rateLimitCooldown(); +// spinner.text = txt; +// return processUserToClerk(userData, spinner); +// } +// +// appendLog({ userId: userData.userId, ...error }); +// } +// } -type User = z.infer; - -const createUser = (userData: User) => - userData.password - ? clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - passwordDigest: userData.password, - passwordHasher: userData.passwordHasher, - }) - : clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - skipPasswordRequirement: true, - }); - -const now = new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss -function appendLog(payload: any) { - fs.appendFileSync( - `./migration-log-${now}.json`, - `\n${JSON.stringify(payload, null, 2)}` - ); -} - -let migrated = 0; -let alreadyExists = 0; - -async function processUserToClerk(userData: User, spinner: Ora) { - const txt = spinner.text; - try { - const parsedUserData = authjsUserSchema.safeParse(userData); - if (!parsedUserData.success) { - throw parsedUserData.error; - } - console.log('USER', parsedUserData.data) - // await createUser(parsedUserData.data); - - migrated++; - } catch (error) { - if (error.status === 422) { - appendLog({ userId: userData.userId, ...error }); - alreadyExists++; - return; - } - - // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails - if (error.status === 429) { - spinner.text = `${txt} - rate limit reached, waiting for ${env.RETRY_DELAY_MS} ms`; - await rateLimitCooldown(); - spinner.text = txt; - return processUserToClerk(userData, spinner); - } - - appendLog({ userId: userData.userId, ...error }); - } -} - -async function cooldown() { - await new Promise((r) => setTimeout(r, env.DELAY)); -} - -async function rateLimitCooldown() { - await new Promise((r) => setTimeout(r, env.RETRY_DELAY_MS)); -} - -async function mainOld() { - - - let i = 
0; - const spinner = ora(`Migrating users`).start(); - - for (const userData of offsetUsers) { - spinner.text = `Migrating user ${i}/${offsetUsers.length}, cooldown`; - await cooldown(); - i++; - spinner.text = `Migrating user ${i}/${offsetUsers.length}`; - await processUserToClerk(userData, spinner); - } - - spinner.succeed(`Migration complete`); - return; -} async function main() { + console.log('TEST') const args = await runCLI() - console.log('PARAMS', args) + // const userSchema = loadValidator(args.source) + // type User = z.infer; - const userSchema = loadValidator(args.source) - type User = z.infer; - - - console.log(userSchema) - - - const users = await loadUsersFromFile(args.file, args.source) + const users = await loadUsersFromFile(args.file, args.key) + console.log(users) const usersToImport = users.slice(parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET); - - importUsers(usersToImport, userSchema, args) + importUsers(usersToImport, args) } diff --git a/package-lock.json b/package-lock.json index 4d83677..0140910 100644 --- a/package-lock.json +++ b/package-lock.json @@ -12,12 +12,12 @@ "@clack/prompts": "^0.7.0", "@clerk/clerk-sdk-node": "^4.12.21", "bun": "^1.0.12", - "csv-parse": "^5.5.5", "csv-parser": "^3.0.0", "dotenv": "^16.3.1", "mime-types": "^2.1.35", "ora": "^7.0.1", "picocolors": "^1.0.0", + "segfault-handler": "^1.3.0", "zod": "^3.22.4" }, "bin": { @@ -52,6 +52,7 @@ }, "node_modules/@clack/prompts/node_modules/is-unicode-supported": { "version": "1.3.0", + "extraneous": true, "inBundle": true, "license": "MIT", "engines": { @@ -437,6 +438,14 @@ } ] }, + "node_modules/bindings": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", + "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", + "dependencies": { + "file-uri-to-path": "1.0.0" + } + }, "node_modules/bl": { "version": "5.1.0", "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", @@ -575,11 +584,6 @@ "node": ">= 0.6" } }, - "node_modules/csv-parse": { - "version": "5.5.5", - "resolved": "https://registry.npmjs.org/csv-parse/-/csv-parse-5.5.5.tgz", - "integrity": "sha512-erCk7tyU3yLWAhk6wvKxnyPtftuy/6Ak622gOO7BCJ05+TYffnPCJF905wmOQm+BpkX54OdAl8pveJwUdpnCXQ==" - }, "node_modules/csv-parser": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz", @@ -640,6 +644,11 @@ "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", "integrity": "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==" }, + "node_modules/file-uri-to-path": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", + "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" + }, "node_modules/form-data": { "version": "3.0.1", "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", @@ -797,6 +806,11 @@ "url": "https://github.com/sponsors/ljharb" } }, + "node_modules/nan": { + "version": "2.18.0", + "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", + "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==" + }, "node_modules/no-case": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", @@ -934,6 +948,16 @@ } ] }, + "node_modules/segfault-handler": { + "version": "1.3.0", + 
"resolved": "https://registry.npmjs.org/segfault-handler/-/segfault-handler-1.3.0.tgz", + "integrity": "sha512-p7kVHo+4uoYkr0jmIiTBthwV5L2qmWtben/KDunDZ834mbos+tY+iO0//HpAJpOFSQZZ+wxKWuRo4DxV02B7Lg==", + "hasInstallScript": true, + "dependencies": { + "bindings": "^1.2.1", + "nan": "^2.14.0" + } + }, "node_modules/signal-exit": { "version": "3.0.7", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", diff --git a/package.json b/package.json index 7fbbeed..6d4f8af 100644 --- a/package.json +++ b/package.json @@ -18,6 +18,7 @@ "mime-types": "^2.1.35", "ora": "^7.0.1", "picocolors": "^1.0.0", + "segfault-handler": "^1.3.0", "zod": "^3.22.4" }, "devDependencies": { diff --git a/samples/clerk.csv b/samples/clerk.csv new file mode 100644 index 0000000..126a0eb --- /dev/null +++ b/samples/clerk.csv @@ -0,0 +1,5 @@ +id,first_name,last_name,username,email_addresses,phone_numbers,totp_secret,password_digest,password_hasher,unsafe_metadata,public_metadata,private_metadata +user_2YDryYFVMM1W1plDDKz7Gzf4we6,Jane,Doe,,janedoe@clerk.dev,,,,bcrypt,{},,{} +user_2ZZCgLE7kJG2CRBxTZ6YUIvzS10,John,Doe,,johndoe@gmail.com,,,,,{},{"discord": {"step": "final"}},{} +user_2cWszPHuo6P2lCdnhhZbVMfbAIC,John,Hancock,,johnhncock@clerk.dev,,,,,{},{"discord": {"step": "discord"}},{} +user_2cukOsyNsh0J3MCEvrgM6PkoB0I,Jane,Hancock,,janehancock@clerk.dev,,,,,{},{},{} diff --git a/src/cli.ts b/src/cli.ts index 95bd695..d39c2d5 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,16 +1,16 @@ import * as p from '@clack/prompts' import color from 'picocolors' -import { authjsFirstSort, checkIfFileExists, createValidatorOptions, getFileType } from './functions' - +import { checkIfFileExists, createHandlerOptions, getFileType } from './functions' +// export const runCLI = async () => { p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`) - const options = createValidatorOptions().sort(authjsFirstSort) + const options = createHandlerOptions() const args = await p.group( { - source: () => + key: () => p.select({ message: 'What platform are you migrating your users from?', initialValue: options[0].value, @@ -65,7 +65,6 @@ export const runCLI = async () => { console.log('Migration started') } - return args } diff --git a/src/envs-constants.ts b/src/envs-constants.ts index 7b7ec27..c17edcd 100644 --- a/src/envs-constants.ts +++ b/src/envs-constants.ts @@ -33,31 +33,3 @@ if (!parsed.success) { export const env = parsed.data -// Dynamically read what validators are present and generate array for use in script - -type Validator = { - value: string; - label: string; - schema: string; -}; - -// -const validatorsDirectory = path.join(__dirname, '/validators'); -export const VALIDATORS: Validator[] = []; -const files = fs.readdirSync(validatorsDirectory); - - -files.forEach((file) => { - if (file.endsWith('.ts')) { - const filePath = path.join(validatorsDirectory, file); - const validatorModule = require(filePath); // Use `require` for dynamic imports in Node.js - - if (validatorModule.options && validatorModule.options.value && validatorModule.options.schema) { - VALIDATORS.push({ - value: validatorModule.options.value, - label: validatorModule.options.label || '', - schema: validatorModule.options.schema, - }); - } - } -}); diff --git a/src/functions.ts b/src/functions.ts index 7df953e..b04d742 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -1,102 +1,179 @@ -import mime from 'mime-types' + import fs from 'fs'; import path from 'path' +import mime from 'mime-types' import csvParser from 
'csv-parser'; -import { VALIDATORS } from './envs-constants'; import * as z from "zod"; -// import { Option } from '@clack/prompts'; +type Handler = { + key: string; + label: string; + transformer: any; +}; -const createFilePath = (file: string) => { +// Dynamically read what handlers are present and generate array for use in script +const handlersDirectory = path.join(__dirname, '/handlers'); +export const handlers: Handler[] = []; +const files = fs.readdirSync(handlersDirectory); + +files.forEach((file) => { + if (file.endsWith('.ts')) { + const filePath = path.join(handlersDirectory, file); + const handlerModule = require(filePath); + + if (handlerModule.options && handlerModule.options.key && handlerModule.options.transformer) { + handlers.push({ + key: handlerModule.options.key, + label: handlerModule.options.label || '', + transformer: handlerModule.options.transformer + }); + } + } +}); + +// default schema -- incoming data will be transformed to this format +export const userSchema = z.object({ + userId: z.string(), + email: z.string().email(), + firstName: z.string().optional(), + lastName: z.string().optional(), + password: z.string().optional(), + passwordHasher: z + .enum([ + "argon2i", + "argon2id", + "bcrypt", + "md5", + "pbkdf2_sha256", + "pbkdf2_sha256_django", + "pbkdf2_sha1", + "scrypt_firebase", + ]) + .optional(), +}); + +export type User = z.infer; + + +// utility function to create file path +const createImportFilePath = (file: string) => { return path.join(__dirname, '..', file) } -export const checkIfFileExists = (file: string) => { - console.log('file', file) - if (fs.existsSync(createFilePath(file))) { - console.log('exist') +// make sure the file exists. CLI will error if it doesn't +export const checkIfFileExists = (file: string) => { + if (fs.existsSync(createImportFilePath(file))) { return true } else { - console.log('does not exist') return false } } +// get the file type so we can verify if this is a JSON or CSV export const getFileType = (file: string) => { - return mime.lookup(createFilePath(file)) + return mime.lookup(createImportFilePath(file)) +} + +// emulate what Clack CLI expects for an option in a Select / MultiSelect +export type OptionType = { + value: string; + label: string | undefined; + hint?: string | undefined; } +// handlers is an array created from the files in /src/validators +// generate an array of options for use in the CLI +export const createHandlerOptions = () => { + const options: OptionType[] = []; + + for (const handler of handlers) { + options.push({ "value": handler.key, "label": handler.label }) + } + return options +} + +// transform incoming data datas to match default schema +// TODO : Remove any -- not sure how to handle this +export const transformKeys = (data: Record, keys: any): Record => { + + const transformedData: Record = {}; + for (const key in data) { + if (data.hasOwnProperty(key)) { + let transformedKey = key; + if (keys.transformer[key]) transformedKey = keys.transformer[key] + + transformedData[transformedKey] = data[key]; + } + } + return transformedData; +}; + + +export const loadUsersFromFile = async (file: string, key: string) => { -export const loadUsersFromFile = async (file: string, source: string) => { + const type = getFileType(createImportFilePath(file)) - // const userSchema = loadValidator(source) - // type User = z.infer; - // - const type = getFileType(createFilePath(file)) + const transformerKeys = handlers.find(obj => obj.key === key); + + // convert a CSV to JSON and return array if (type === 
"text/csv") { - const users = [{}]; + const users: User[] = []; return new Promise((resolve, reject) => { - fs.createReadStream(createFilePath(file)) + fs.createReadStream(createImportFilePath(file)) .pipe(csvParser()) - .on('data', (data) => users.push(data)) + .on('data', (data) => { + users.push(data) + }) .on('error', (err) => reject(err)) .on('end', () => { resolve(users) }) }); + + // if the file is already JSON, just read and parse and return the result } else { - // TODO: Can we deal with the any here? - const users = JSON.parse( - fs.readFileSync(createFilePath(file), "utf-8") + const users: User[] = JSON.parse( + fs.readFileSync(createImportFilePath(file), "utf-8") ); - return users - } -} - -// emulate what Clack expects for an option in a Select / MultiSelect -export type OptionType = { - value: string; - label: string | undefined; - hint?: string | undefined; -} - -export const createValidatorOptions = () => { - const options: OptionType[] = []; - - for (const validator of VALIDATORS) { - options.push({ "value": validator.value, "label": validator.label }) - } - - return options -} + const transformedData: User[] = []; -export const loadValidator = (validatorName: string) => { - const validatorsDirectory = path.join(__dirname, 'validators'); + for (const user of users) { + // = transformKeys(users) + const transformedUser = transformKeys(user, transformerKeys) - const filePath = path.join(validatorsDirectory, `${validatorName}Validator`); - const validatorModule = require(filePath); + const validationResult = userSchema.safeParse(transformedUser) - const userSchema = validatorModule.default; - - console.log(`Imported:`, userSchema); - - return userSchema + // Check if validation was successful + if (validationResult.success) { + // The data is valid according to the original schema + const validatedData = validationResult.data; + transformedData.push(validatedData) + } else { + // The data is not valid, handle errors + console.error('Validation Errors:', validationResult.error.errors); + } + } + // console.log('transformed data', JSON.stringify(transformedData)) + return transformedData + } } - +// Make sure that Auth.js is the first option for the script export const authjsFirstSort = (a: any, b: any): number => { // If 'authjs' is present in either 'a' or 'b', prioritize it - if (a.value === 'authjs') return -1; - if (b.value === 'authjs') return 1; + if (a.key === 'authjs') return -1; + if (b.key === 'authjs') return 1; // Otherwise, maintain the original order return 0; }; + + diff --git a/src/handlers/authjsHandler.ts b/src/handlers/authjsHandler.ts new file mode 100644 index 0000000..d1fded9 --- /dev/null +++ b/src/handlers/authjsHandler.ts @@ -0,0 +1,14 @@ +import * as z from "zod"; + +export const options = { + key: 'authjs', + label: 'Authjs (Next-Auth)', + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName" + } + +} + diff --git a/src/handlers/clerkHandler.ts b/src/handlers/clerkHandler.ts new file mode 100644 index 0000000..1aa60d6 --- /dev/null +++ b/src/handlers/clerkHandler.ts @@ -0,0 +1,15 @@ +import * as z from "zod"; + +export const options = { + key: 'clerk', + label: 'Clerk', + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName" + } +} + + + diff --git a/src/handlers/supabaseHandler.ts b/src/handlers/supabaseHandler.ts new file mode 100644 index 0000000..955430a --- /dev/null +++ b/src/handlers/supabaseHandler.ts @@ -0,0 +1,12 @@ +import * as z 
from "zod"; + +export const options = { + key: 'supabase', + label: 'Supabase', + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName" + } +} diff --git a/src/validators/authjsValidator.ts b/src/validators/authjsValidator.ts deleted file mode 100644 index 0bed838..0000000 --- a/src/validators/authjsValidator.ts +++ /dev/null @@ -1,30 +0,0 @@ -import * as z from "zod"; - -export const options = { - value: 'authjs', - label: 'Authjs (Next-Auth)', - schema: 'authjsUserSchema' -} - -const authjsUserSchema = z.object({ - userId: z.string(), - email: z.string().email(), - firstName: z.string().optional(), - lastName: z.string().optional(), - password: z.string().optional(), - passwordHasher: z - .enum([ - "argon2i", - "argon2id", - "bcrypt", - "md5", - "pbkdf2_sha256", - "pbkdf2_sha256_django", - "pbkdf2_sha1", - "scrypt_firebase", - ]) - .optional(), -}); - -export default authjsUserSchema - diff --git a/src/validators/supabaseValidator.ts b/src/validators/supabaseValidator.ts deleted file mode 100644 index 7bb4c9a..0000000 --- a/src/validators/supabaseValidator.ts +++ /dev/null @@ -1,29 +0,0 @@ -import * as z from "zod"; - -export const options = { - value: 'supabase', - label: 'Supabase', - schema: 'supabaseUserSchema' -} - -const supabaseUserSchema = z.object({ - id: z.string(), - email: z.string().email(), - firstName: z.string().optional(), - lastName: z.string().optional(), - encrypted_password: z.string().optional(), - passwordHasher: z - .enum([ - "argon2i", - "argon2id", - "bcrypt", - "md5", - "pbkdf2_sha256", - "pbkdf2_sha256_django", - "pbkdf2_sha1", - "scrypt_firebase", - ]) - .default('bcrypt'), -}); - -export default supabaseUserSchema From 7a02123f909572e5fe22fdd46a4b6d0a19a7d1e8 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 03:06:00 -0500 Subject: [PATCH 09/31] (wip) Basic import is now working --- src/import-users.ts | 75 +++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 75 insertions(+) create mode 100644 src/import-users.ts diff --git a/src/import-users.ts b/src/import-users.ts new file mode 100644 index 0000000..c777aee --- /dev/null +++ b/src/import-users.ts @@ -0,0 +1,75 @@ +import clerkClient from "@clerk/clerk-sdk-node"; +import { env } from "./envs-constants"; +import { boolean } from "zod"; +import { User, userSchema } from "./functions"; + +type CliArgs = { + key: string, + file: string, + instance: string, + offest?: string, + begin: boolean +} + +async function cooldown(ms: number) { + await new Promise((r) => setTimeout(r, ms)); +} + + + +const createUser = (userData: User) => + userData.password + ? 
clerkClient.users.createUser({ + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + passwordDigest: userData.password, + passwordHasher: userData.passwordHasher, + }) + : clerkClient.users.createUser({ + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + skipPasswordRequirement: true, + }); + + + +async function processUserToClerk(userData: User) { + try { + const parsedUserData = userSchema.safeParse(userData); + if (!parsedUserData.success) { + throw parsedUserData.error; + } + await createUser(parsedUserData.data); + + } catch (error) { + if (error.status === 422) { + // appendLog({ userId: userData.userId, ...error }); + return; + } + + // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails + if (error.status === 429) { + await cooldown(env.RETRY_DELAY_MS) + return processUserToClerk(userData); + } + + // appendLog({ userId: userData.userId, ...error }); + } +} + + + +export const importUsers = async (users: User[], args: CliArgs) => { + + console.log('STARTING IMPORT') + + for (const user of users) { + await cooldown(env.DELAY) + await processUserToClerk(user) + } + +}
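[Editor's note] Patch 09 above throttles every request with a fixed delay and, when the API answers with HTTP 429, waits out the longer RETRY_DELAY_MS cooldown before retrying the same user. A standalone TypeScript sketch of that retry shape follows; the helper name, the default delay, and the assumption that thrown errors carry a numeric `status` are illustrative, not Clerk's documented contract.

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

// Run one unit of work; on a 429-style error, cool down and retry that same unit.
// `retryDelayMs` mirrors the RETRY_DELAY_MS env setting; 10_000 is an assumed default.
async function withRateLimitRetry(work: () => Promise<void>, retryDelayMs = 10_000): Promise<void> {
  try {
    await work();
  } catch (error: any) {
    if (error?.status === 429) {
      await sleep(retryDelayMs);
      return withRateLimitRetry(work, retryDelayMs);
    }
    throw error; // non-rate-limit errors are left to the caller (later patches log them)
  }
}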
From 8367ee7f19c01ad46c3582bc29ab09f44736fbe2 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 03:28:48 -0500 Subject: [PATCH 10/31] (wip) Cleanup and adding spinners + messaging --- index.ts | 84 +-------------------------------- src/cli.ts | 2 +- src/functions.ts | 11 +++-- src/handlers/authjsHandler.ts | 2 - src/handlers/clerkHandler.ts | 2 - src/handlers/supabaseHandler.ts | 2 - src/import-users.ts | 21 ++++++--- src/spinner.ts | 22 +++++++++ 8 files changed, 46 insertions(+), 100 deletions(-)
diff --git a/index.ts b/index.ts index 6150bfa..6995c1b 100755 --- a/index.ts +++ b/index.ts @@ -1,20 +1,10 @@ import { config } from "dotenv"; config(); -import * as p from '@clack/prompts'; -import color from 'picocolors' -import { setTimeout } from 'node:timers/promises'; - -import * as fs from "fs"; -import * as path from 'path'; -import * as z from "zod"; -import clerkClient, { User } from "@clerk/clerk-sdk-node"; -import ora, { Ora } from "ora"; import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; -import { loadUsersFromFile, loadValidator } from "./src/functions"; +import { loadUsersFromFile } from "./src/functions"; import { importUsers } from "./src/import-users"; -import authjsUserSchema from "./src/transformers/authjsTransfomer"; if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { throw new Error( @@ -22,83 +12,11 @@ if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false ); } - -// -// type User = z.infer; -// -// const createUser = (userData: User) => -// userData.password -// ?
clerkClient.users.createUser({ -// externalId: userData.userId, -// emailAddress: [userData.email], -// firstName: userData.firstName, -// lastName: userData.lastName, -// passwordDigest: userData.password, -// passwordHasher: userData.passwordHasher, -// }) -// : clerkClient.users.createUser({ -// externalId: userData.userId, -// emailAddress: [userData.email], -// firstName: userData.firstName, -// lastName: userData.lastName, -// skipPasswordRequirement: true, -// }); -// -// const now = new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss -// function appendLog(payload: any) { -// fs.appendFileSync( -// `./migration-log-${now}.json`, -// `\n${JSON.stringify(payload, null, 2)}` -// ); -// } -// let migrated = 0; -// let alreadyExists = 0; -// -// async function processUserToClerk(userData: User, spinner: Ora) { -// const txt = spinner.text; -// try { -// const parsedUserData = authjsUserSchema.safeParse(userData); -// if (!parsedUserData.success) { -// throw parsedUserData.error; -// } -// console.log('USER', parsedUserData.data) -// // await createUser(parsedUserData.data); -// -// migrated++; -// } catch (error) { -// if (error.status === 422) { -// appendLog({ userId: userData.userId, ...error }); -// alreadyExists++; -// return; -// } -// -// // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails -// if (error.status === 429) { -// spinner.text = `${txt} - rate limit reached, waiting for ${env.RETRY_DELAY_MS} ms`; -// await rateLimitCooldown(); -// spinner.text = txt; -// return processUserToClerk(userData, spinner); -// } -// -// appendLog({ userId: userData.userId, ...error }); -// } -// } - - - - async function main() { - console.log('TEST') - const args = await runCLI() - // const userSchema = loadValidator(args.source) - // type User = z.infer; - const users = await loadUsersFromFile(args.file, args.key) - console.log(users) - const usersToImport = users.slice(parseInt(args.offset) > env.OFFSET ?
parseInt(args.offset) : env.OFFSET); importUsers(usersToImport, args)
diff --git a/src/cli.ts b/src/cli.ts index d39c2d5..16ff57e 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -62,7 +62,7 @@ export const runCLI = async () => { ) if (args.begin) { - console.log('Migration started') + // console.log('Migration started') } return args
diff --git a/src/functions.ts b/src/functions.ts index b04d742..6623beb 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -4,6 +4,9 @@ import path from 'path' import mime from 'mime-types' import csvParser from 'csv-parser'; import * as z from "zod"; +import * as p from '@clack/prompts' + +const s = p.spinner() type Handler = { key: string; @@ -113,6 +116,9 @@ export const transformKeys = (data: Record, keys: any): Record { + s.start() + s.message('Loading users and preparing to migrate') + const type = getFileType(createImportFilePath(file)) const transformerKeys = handlers.find(obj => obj.key === key); @@ -157,10 +163,9 @@ export const loadUsersFromFile = async (file: string, key: string) => { // The data is not valid, handle errors console.error('Validation Errors:', validationResult.error.errors); - } - - // console.log('transformed data', JSON.stringify(transformedData)) + s.stop('Users Loaded') + p.log.step('Users loaded') return transformedData } }
diff --git a/src/handlers/authjsHandler.ts b/src/handlers/authjsHandler.ts index d1fded9..645dd6f 100644 --- a/src/handlers/authjsHandler.ts +++ b/src/handlers/authjsHandler.ts @@ -1,5 +1,3 @@ -import * as z from "zod"; - export const options = { key: 'authjs', label: 'Authjs (Next-Auth)',
diff --git a/src/handlers/clerkHandler.ts b/src/handlers/clerkHandler.ts index 1aa60d6..5335df8 100644 --- a/src/handlers/clerkHandler.ts +++ b/src/handlers/clerkHandler.ts @@ -1,5 +1,3 @@ -import * as z from "zod"; - export const options = { key: 'clerk', label: 'Clerk',
diff --git a/src/handlers/supabaseHandler.ts b/src/handlers/supabaseHandler.ts index 955430a..bbd1604 100644 --- a/src/handlers/supabaseHandler.ts +++ b/src/handlers/supabaseHandler.ts @@ -1,5 +1,3 @@ -import * as z from "zod"; - export const options = { key: 'supabase', label: 'Supabase',
diff --git a/src/import-users.ts b/src/import-users.ts index c777aee..e1b9c82 100644 --- a/src/import-users.ts +++ b/src/import-users.ts @@ -1,7 +1,7 @@ import clerkClient from "@clerk/clerk-sdk-node"; import { env } from "./envs-constants"; -import { boolean } from "zod"; import { User, userSchema } from "./functions"; +import * as p from '@clack/prompts' type CliArgs = { key: string, file: string, instance: string, offest?: string, begin: boolean } +const s = p.spinner() +let migrated = 0 + async function cooldown(ms: number) { await new Promise((r) => setTimeout(r, ms)); } - const createUser = (userData: User) => userData.password ?
clerkClient.users.createUser({ @@ -37,13 +39,15 @@ const createUser = (userData: User) => -async function processUserToClerk(userData: User) { +async function processUserToClerk(userData: User, total: number) { try { const parsedUserData = userSchema.safeParse(userData); if (!parsedUserData.success) { throw parsedUserData.error; } await createUser(parsedUserData.data); + migrated++ + s.message(`Migrating users: [${migrated}/${total}]`) } catch (error) { if (error.status === 422) { @@ -54,7 +58,7 @@ async function processUserToClerk(userData: User) { // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails if (error.status === 429) { await cooldown(env.RETRY_DELAY_MS) - return processUserToClerk(userData); + return processUserToClerk(userData, total); } // appendLog({ userId: userData.userId, ...error }); @@ -65,11 +69,14 @@ async function processUserToClerk(userData: User) { export const importUsers = async (users: User[], args: CliArgs) => { - console.log('STARTING IMPORT') + s.start() + const total = users.length + s.message(`Migrating users: [0/${total}]`) for (const user of users) { await cooldown(env.DELAY) - await processUserToClerk(user) + await processUserToClerk(user, total) } - + s.stop() + p.outro('Migration complete') }
diff --git a/src/spinner.ts b/src/spinner.ts index e69de29..4a466a0 100644 --- a/src/spinner.ts +++ b/src/spinner.ts @@ -0,0 +1,22 @@ +import * as p from '@clack/prompts'; + +p.intro('spinner start...'); + +const spin = p.spinner(); +const total = 10000; +let progress = 0; +spin.start(); + +new Promise((resolve) => { + const timer = setInterval(() => { + progress = Math.min(total, progress + 100); + if (progress >= total) { + clearInterval(timer); + resolve(true); + } + spin.message(`Loading packages [${progress}/${total}]`); // <=== + }, 100); +}).then(() => { + spin.stop(`Done`); + p.outro('spinner stop...'); +});
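[Editor's note] Patch 10 above replaces `ora` with the spinner from `@clack/prompts`, and `src/spinner.ts` is a scratch demo of the pattern the importer now uses: start one spinner, rewrite its message in place as the batch progresses, then stop it. A condensed, runnable sketch of that pattern, assuming `@clack/prompts` is installed; the item list and delay are stand-ins for real migration work.

import * as p from '@clack/prompts';

const sleep = (ms: number) => new Promise<void>((resolve) => setTimeout(resolve, ms));

async function runBatch(items: string[]) {
  const s = p.spinner();
  s.start('Migrating users');
  for (let i = 0; i < items.length; i++) {
    await sleep(50); // stand-in for one createUser request
    s.message(`Migrating users: [${i + 1}/${items.length}]`); // rewrite the same line in place
  }
  s.stop('Migration complete');
}

runBatch(['ada', 'grace', 'alan']);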
From 398e9ec777150f47f714cdccc98ce4a01b84c66b Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 11:45:03 -0500 Subject: [PATCH 11/31] (wip) Added logger, some cleanup --- index.ts | 4 ---- src/functions.ts | 11 +++++++++-- src/import-users.ts | 22 ++++++++++++---------- src/logger.ts | 24 ++++++++++++++++++++++++ 4 files changed, 45 insertions(+), 16 deletions(-) create mode 100644 src/logger.ts
diff --git a/index.ts b/index.ts index 6995c1b..a9b2d02 100755 --- a/index.ts +++ b/index.ts @@ -23,8 +23,4 @@ async function main() { } - - - - main()
diff --git a/src/functions.ts b/src/functions.ts index 6623beb..94ee47b 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -5,6 +5,7 @@ import mime from 'mime-types' import csvParser from 'csv-parser'; import * as z from "zod"; import * as p from '@clack/prompts' +import { logger } from './logger'; const s = p.spinner() @@ -79,6 +80,11 @@ export const getFileType = (file: string) => { return mime.lookup(createImportFilePath(file)) } +export const getDateTimeStamp = () => { + return new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss + +} + // emulate what Clack CLI expects for an option in a Select / MultiSelect export type OptionType = { value: string; @@ -114,8 +120,8 @@ export const transformKeys = (data: Record, keys: any): Record { - +export const loadUsersFromFile = async (file: string, key: string): Promise => { + const dateTime = getDateTimeStamp() s.start() @@ -162,6 +168,7 @@ export const loadUsersFromFile = async (file: string, key: string): Promise { } else { // The data is not valid, handle
errors console.error('Validation Errors:', validationResult.error.errors); + logger("error", validationResult.error.errors, dateTime) } } s.stop('Users Loaded') diff --git a/src/import-users.ts b/src/import-users.ts index e1b9c82..207f937 100644 --- a/src/import-users.ts +++ b/src/import-users.ts @@ -1,7 +1,8 @@ import clerkClient from "@clerk/clerk-sdk-node"; import { env } from "./envs-constants"; -import { User, userSchema } from "./functions"; +import { User, getDateTimeStamp, userSchema } from "./functions"; import * as p from '@clack/prompts' +import { logger } from "./logger"; type CliArgs = { key: string, @@ -39,7 +40,7 @@ const createUser = (userData: User) => -async function processUserToClerk(userData: User, total: number) { +async function processUserToClerk(userData: User, total: number, dateTime: string) { try { const parsedUserData = userSchema.safeParse(userData); if (!parsedUserData.success) { @@ -50,18 +51,18 @@ async function processUserToClerk(userData: User, total: number) { s.message(`Migrating users: [${migrated}/${total}]`) } catch (error) { - if (error.status === 422) { - // appendLog({ userId: userData.userId, ...error }); - return; - } - // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails if (error.status === 429) { await cooldown(env.RETRY_DELAY_MS) - return processUserToClerk(userData, total); + return processUserToClerk(userData, total, dateTime); + } + + if (error.status === 422) { + logger({ userId: userData.userId, ...error }, "error", dateTime); + return; } - // appendLog({ userId: userData.userId, ...error }); + logger({ userId: userData.userId, ...error }, "info", dateTime); } } @@ -69,13 +70,14 @@ async function processUserToClerk(userData: User, total: number) { export const importUsers = async (users: User[], args: CliArgs) => { + const dateTime = getDateTimeStamp() s.start() const total = users.length s.message(`Migration users: [0/${total}]`) for (const user of users) { await cooldown(env.DELAY) - await processUserToClerk(user, total) + await processUserToClerk(user, total, dateTime) } s.stop() p.outro('Migration complete') diff --git a/src/logger.ts b/src/logger.ts new file mode 100644 index 0000000..7d1330f --- /dev/null +++ b/src/logger.ts @@ -0,0 +1,24 @@ +import fs from 'fs'; + +export const logger = (type: "info" | "error" | "validation", payload: any, dateTime: string): void => { + + console.log(type) + + + if (type === "info") { + + fs.appendFileSync( + `./logs/info/${dateTime}.json`, + `\n${JSON.stringify(payload, null, 2)}` + ); + } + + if (type === "error") { + fs.appendFileSync( + `./logs/errors/${dateTime}.json`, + `\n${JSON.stringify(payload, null, 2)}` + ); + + } + +} From 3f400b2566f0f12e79d02b5ad5ab73837858b591 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 12:01:37 -0500 Subject: [PATCH 12/31] (wip) Improved logger, creates directories as needed --- src/functions.ts | 5 +---- src/logger.ts | 44 +++++++++++++++++++++++++++++++++++++++++--- 2 files changed, 42 insertions(+), 7 deletions(-) diff --git a/src/functions.ts b/src/functions.ts index 94ee47b..4065dc6 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -153,9 +153,7 @@ export const loadUsersFromFile = async (file: string, key: string): Promise { + console.log('creating', path) + try { + if (!fs.existsSync(path)) { + fs.mkdirSync(path); + } + } catch (err) { + console.error(err); + } + +} -export const logger = (type: "info" | "error" | "validation", payload: any, dateTime: string): void => { +export const logger = 
(type: "info" | "error" | "validator", payload: any, dateTime: string): void => { + confirmOrCreateFolder(path.join(__dirname, '..', 'logs')) console.log(type) if (type === "info") { + const infoPath = path.join(__dirname, '..', 'logs', 'info') + + confirmOrCreateFolder(infoPath) fs.appendFileSync( - `./logs/info/${dateTime}.json`, + `${infoPath}/${dateTime}.json`, `\n${JSON.stringify(payload, null, 2)}` ); } if (type === "error") { + const errorsPath = path.join(__dirname, '..', 'logs', 'errors') + console.log(errorsPath) + confirmOrCreateFolder(errorsPath) + + + + fs.appendFileSync( + `${errorsPath}/${dateTime}.json`, + `\n${JSON.stringify(payload, null, 2)}` + ); + + } + + + if (type === "validator") { + const validatorPath = path.join(__dirname, '..', 'logs', 'validator') + confirmOrCreateFolder(validatorPath) + + + fs.appendFileSync( - `./logs/errors/${dateTime}.json`, + `${validatorPath}/${dateTime}.json`, `\n${JSON.stringify(payload, null, 2)}` ); } + } From f2d6e6a236a959769c903e8363f997fb5ee339e1 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 17:53:50 -0500 Subject: [PATCH 13/31] (wip) Improved logger significantly, removed blank/empty entries from .csv, cleanup --- index.ts | 2 +- package-lock.json | 1114 ---------------------------------- package.json | 6 +- src/cli.ts | 7 +- src/functions.ts | 28 +- src/handlers/clerkHandler.ts | 5 +- src/import-users.ts | 13 +- src/logger.ts | 109 +++- 8 files changed, 112 insertions(+), 1172 deletions(-) delete mode 100644 package-lock.json diff --git a/index.ts b/index.ts index a9b2d02..acd72df 100755 --- a/index.ts +++ b/index.ts @@ -19,7 +19,7 @@ async function main() { const usersToImport = users.slice(parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET); - importUsers(usersToImport, args) + importUsers(usersToImport) } diff --git a/package-lock.json b/package-lock.json deleted file mode 100644 index 0140910..0000000 --- a/package-lock.json +++ /dev/null @@ -1,1114 +0,0 @@ -{ - "name": "clerk-user-migration", - "version": "0.0.1", - "lockfileVersion": 3, - "requires": true, - "packages": { - "": { - "name": "clerk-user-migration", - "version": "0.0.1", - "license": "ISC", - "dependencies": { - "@clack/prompts": "^0.7.0", - "@clerk/clerk-sdk-node": "^4.12.21", - "bun": "^1.0.12", - "csv-parser": "^3.0.0", - "dotenv": "^16.3.1", - "mime-types": "^2.1.35", - "ora": "^7.0.1", - "picocolors": "^1.0.0", - "segfault-handler": "^1.3.0", - "zod": "^3.22.4" - }, - "bin": { - "clerk-user-migration": "index.ts" - }, - "devDependencies": { - "@types/mime-types": "^2.1.4" - } - }, - "node_modules/@clack/core": { - "version": "0.3.4", - "resolved": "https://registry.npmjs.org/@clack/core/-/core-0.3.4.tgz", - "integrity": "sha512-H4hxZDXgHtWTwV3RAVenqcC4VbJZNegbBjlPvzOzCouXtS2y3sDvlO3IsbrPNWuLWPPlYVYPghQdSF64683Ldw==", - "dependencies": { - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, - "node_modules/@clack/prompts": { - "version": "0.7.0", - "resolved": "https://registry.npmjs.org/@clack/prompts/-/prompts-0.7.0.tgz", - "integrity": "sha512-0MhX9/B4iL6Re04jPrttDm+BsP8y6mS7byuv0BvXgdXhbV5PdlsHt55dvNsuBCPZ7xq1oTAOOuotR9NFbQyMSA==", - "bundleDependencies": [ - "is-unicode-supported" - ], - "dependencies": { - "@clack/core": "^0.3.3", - "is-unicode-supported": "*", - "picocolors": "^1.0.0", - "sisteransi": "^1.0.5" - } - }, - "node_modules/@clack/prompts/node_modules/is-unicode-supported": { - "version": "1.3.0", - "extraneous": true, - "inBundle": true, - "license": "MIT", - "engines": { - "node": ">=12" - }, 
- "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@clerk/backend": { - "version": "0.34.1", - "resolved": "https://registry.npmjs.org/@clerk/backend/-/backend-0.34.1.tgz", - "integrity": "sha512-I6u7vb7XHA0kNek5Ez4VVqBDZKxLepR6wJXlYUy5lGwsTdaQiFwy5Q0nKP2GdQQYtlKpXSAryLu19Cq5zaaNYg==", - "dependencies": { - "@clerk/shared": "1.1.0", - "@clerk/types": "3.58.0", - "@peculiar/webcrypto": "1.4.1", - "@types/node": "16.18.6", - "cookie": "0.5.0", - "deepmerge": "4.2.2", - "node-fetch-native": "1.0.1", - "snakecase-keys": "5.4.4", - "tslib": "2.4.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@clerk/backend/node_modules/snakecase-keys": { - "version": "5.4.4", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-5.4.4.tgz", - "integrity": "sha512-YTywJG93yxwHLgrYLZjlC75moVEX04LZM4FHfihjHe1FCXm+QaLOFfSf535aXOAd0ArVQMWUAe8ZPm4VtWyXaA==", - "dependencies": { - "map-obj": "^4.1.0", - "snake-case": "^3.0.4", - "type-fest": "^2.5.2" - }, - "engines": { - "node": ">=12" - } - }, - "node_modules/@clerk/backend/node_modules/tslib": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", - "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==" - }, - "node_modules/@clerk/backend/node_modules/type-fest": { - "version": "2.19.0", - "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-2.19.0.tgz", - "integrity": "sha512-RAH822pAdBgcNMAfWnCBU3CFZcfZ/i1eZjwFU/dsLKumyuuP3niueg2UAukXYF0E2AAoc82ZSSf9J0WQBinzHA==", - "engines": { - "node": ">=12.20" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/@clerk/clerk-sdk-node": { - "version": "4.12.21", - "resolved": "https://registry.npmjs.org/@clerk/clerk-sdk-node/-/clerk-sdk-node-4.12.21.tgz", - "integrity": "sha512-43MdviLlAG3naNzRyxF/Io8YYQBnFEIQiqYFVHzKzZGEsbPST9lBfeFxJZKrCqSE8K7gMx3+3D87bveXq6a7cA==", - "dependencies": { - "@clerk/backend": "0.34.1", - "@clerk/shared": "1.1.0", - "@clerk/types": "3.58.0", - "@types/cookies": "0.7.7", - "@types/express": "4.17.14", - "@types/node-fetch": "2.6.2", - "camelcase-keys": "6.2.2", - "snakecase-keys": "3.2.1", - "tslib": "2.4.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@clerk/clerk-sdk-node/node_modules/tslib": { - "version": "2.4.1", - "resolved": "https://registry.npmjs.org/tslib/-/tslib-2.4.1.tgz", - "integrity": "sha512-tGyy4dAjRIEwI7BzsB0lynWgOpfqjUdq91XXAlIWD2OwKBH7oCl/GZG/HT4BOHrTlPMOASlMQ7veyTqpmRcrNA==" - }, - "node_modules/@clerk/shared": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@clerk/shared/-/shared-1.1.0.tgz", - "integrity": "sha512-rxQ6bxAERZsf/dzCU35qt3gRp9+a035Vrre8j8tyT60dbP8PQhXUbeNu+oVqqjpHWeyoWWt6fZGLXbDTXdXx7g==", - "dependencies": { - "glob-to-regexp": "0.4.1", - "js-cookie": "3.0.1", - "swr": "2.2.0" - }, - "peerDependencies": { - "react": ">=16" - }, - "peerDependenciesMeta": { - "react": { - "optional": true - } - } - }, - "node_modules/@clerk/types": { - "version": "3.58.0", - "resolved": "https://registry.npmjs.org/@clerk/types/-/types-3.58.0.tgz", - "integrity": "sha512-fIsvEM3nYQwViOuYxNVcwEl0WkXW6AdYpSghNBKfOge1kriSSHP++T5rRMJBXy6asl2AEydVlUBKx9drAzqKoA==", - "dependencies": { - "csstype": "3.1.1" - }, - "engines": { - "node": ">=14" - } - }, - "node_modules/@clerk/types/node_modules/csstype": { - "version": "3.1.1", - "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.1.tgz", - "integrity": 
"sha512-DJR/VvkAvSZW9bTouZue2sSxDwdTN92uHjqeKVm+0dAqdfNykRzQ95tay8aXMBAAPpUiq4Qcug2L7neoRh2Egw==" - }, - "node_modules/@oven/bun-darwin-aarch64": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-darwin-aarch64/-/bun-darwin-aarch64-1.0.12.tgz", - "integrity": "sha512-e/iNyt8HXlvDTzyvKUyq+vIUVyID9WykyDvNEcz5jM9bcdwimiAo+VGvRhAWnRkazhDBY5H3DL+ixEGy0ljIGw==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@oven/bun-darwin-x64": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-darwin-x64/-/bun-darwin-x64-1.0.12.tgz", - "integrity": "sha512-CWfuYPJ1oObCKskOZeg7aM6ToJgt1LEpIIyaqRiYiVji3lrEcnNVPFUJqj7JlQrchZrcrqRr0duKypVCQ+8Jig==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@oven/bun-darwin-x64-baseline": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-darwin-x64-baseline/-/bun-darwin-x64-baseline-1.0.12.tgz", - "integrity": "sha512-E/0pWuimJlrSzbk6TLgHHvJ0YkRv6oUT1grvgbJz1zyY5/86tAzbc8N6i37kot3jvJ/qF4pF98DkAK+V5TKOMg==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "darwin" - ] - }, - "node_modules/@oven/bun-linux-aarch64": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-linux-aarch64/-/bun-linux-aarch64-1.0.12.tgz", - "integrity": "sha512-0az/FbWNerffUw4ik2VYq/L1m+YncV1uRj59YJMVgB7Eyo1ykgGAmKM/7bUFNrwO1c8Ydz0vj2oOXeYJzWc1Tg==", - "cpu": [ - "arm64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@oven/bun-linux-x64": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64/-/bun-linux-x64-1.0.12.tgz", - "integrity": "sha512-A5PP4JpKVwqtj31ZPOHJlerFyw8zOJKRk6ssk1m0jRaFm0/4tEcpqQzX/pPmZcoFhWKcKDnwSJDUIT5vR0q24w==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@oven/bun-linux-x64-baseline": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/@oven/bun-linux-x64-baseline/-/bun-linux-x64-baseline-1.0.12.tgz", - "integrity": "sha512-/sSpuNXbCnNoZ3HHL2veGZWmBqIEeM4skaAMp4rSD+Yf5NbHZXeB4qhj7bp7DTMyRESkScMir1DpJifqNhNd/Q==", - "cpu": [ - "x64" - ], - "optional": true, - "os": [ - "linux" - ] - }, - "node_modules/@peculiar/asn1-schema": { - "version": "2.3.8", - "resolved": "https://registry.npmjs.org/@peculiar/asn1-schema/-/asn1-schema-2.3.8.tgz", - "integrity": "sha512-ULB1XqHKx1WBU/tTFIA+uARuRoBVZ4pNdOA878RDrRbBfBGcSzi5HBkdScC6ZbHn8z7L8gmKCgPC1LHRrP46tA==", - "dependencies": { - "asn1js": "^3.0.5", - "pvtsutils": "^1.3.5", - "tslib": "^2.6.2" - } - }, - "node_modules/@peculiar/json-schema": { - "version": "1.1.12", - "resolved": "https://registry.npmjs.org/@peculiar/json-schema/-/json-schema-1.1.12.tgz", - "integrity": "sha512-coUfuoMeIB7B8/NMekxaDzLhaYmp0HZNPEjYRm9goRou8UZIC3z21s0sL9AWoCw4EG876QyO3kYrc61WNF9B/w==", - "dependencies": { - "tslib": "^2.0.0" - }, - "engines": { - "node": ">=8.0.0" - } - }, - "node_modules/@peculiar/webcrypto": { - "version": "1.4.1", - "resolved": "https://registry.npmjs.org/@peculiar/webcrypto/-/webcrypto-1.4.1.tgz", - "integrity": "sha512-eK4C6WTNYxoI7JOabMoZICiyqRRtJB220bh0Mbj5RwRycleZf9BPyZoxsTvpP0FpmVS2aS13NKOuh5/tN3sIRw==", - "dependencies": { - "@peculiar/asn1-schema": "^2.3.0", - "@peculiar/json-schema": "^1.1.12", - "pvtsutils": "^1.3.2", - "tslib": "^2.4.1", - "webcrypto-core": "^1.7.4" - }, - "engines": { - "node": ">=10.12.0" - } - }, - "node_modules/@types/body-parser": { - "version": "1.19.5", - "resolved": 
"https://registry.npmjs.org/@types/body-parser/-/body-parser-1.19.5.tgz", - "integrity": "sha512-fB3Zu92ucau0iQ0JMCFQE7b/dv8Ot07NI3KaZIkIUNXq82k4eBAqUaneXfleGY9JWskeS9y+u0nXMyspcuQrCg==", - "dependencies": { - "@types/connect": "*", - "@types/node": "*" - } - }, - "node_modules/@types/connect": { - "version": "3.4.38", - "resolved": "https://registry.npmjs.org/@types/connect/-/connect-3.4.38.tgz", - "integrity": "sha512-K6uROf1LD88uDQqJCktA4yzL1YYAK6NgfsI0v/mTgyPKWsX1CnJ0XPSDhViejru1GcRkLWb8RlzFYJRqGUbaug==", - "dependencies": { - "@types/node": "*" - } - }, - "node_modules/@types/cookies": { - "version": "0.7.7", - "resolved": "https://registry.npmjs.org/@types/cookies/-/cookies-0.7.7.tgz", - "integrity": "sha512-h7BcvPUogWbKCzBR2lY4oqaZbO3jXZksexYJVFvkrFeLgbZjQkU4x8pRq6eg2MHXQhY0McQdqmmsxRWlVAHooA==", - "dependencies": { - "@types/connect": "*", - "@types/express": "*", - "@types/keygrip": "*", - "@types/node": "*" - } - }, - "node_modules/@types/express": { - "version": "4.17.14", - "resolved": "https://registry.npmjs.org/@types/express/-/express-4.17.14.tgz", - "integrity": "sha512-TEbt+vaPFQ+xpxFLFssxUDXj5cWCxZJjIcB7Yg0k0GMHGtgtQgpvx/MUQUeAkNbA9AAGrwkAsoeItdTgS7FMyg==", - "dependencies": { - "@types/body-parser": "*", - "@types/express-serve-static-core": "^4.17.18", - "@types/qs": "*", - "@types/serve-static": "*" - } - }, - "node_modules/@types/express-serve-static-core": { - "version": "4.17.41", - "resolved": "https://registry.npmjs.org/@types/express-serve-static-core/-/express-serve-static-core-4.17.41.tgz", - "integrity": "sha512-OaJ7XLaelTgrvlZD8/aa0vvvxZdUmlCn6MtWeB7TkiKW70BQLc9XEPpDLPdbo52ZhXUCrznlWdCHWxJWtdyajA==", - "dependencies": { - "@types/node": "*", - "@types/qs": "*", - "@types/range-parser": "*", - "@types/send": "*" - } - }, - "node_modules/@types/http-errors": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/@types/http-errors/-/http-errors-2.0.4.tgz", - "integrity": "sha512-D0CFMMtydbJAegzOyHjtiKPLlvnm3iTZyZRSZoLq2mRhDdmLfIWOCYPfQJ4cu2erKghU++QvjcUjp/5h7hESpA==" - }, - "node_modules/@types/keygrip": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/keygrip/-/keygrip-1.0.5.tgz", - "integrity": "sha512-M+BUYYOXgiYoab5L98VpOY1PzmDwWcTkqqu4mdluez5qOTDV0MVPChxhRIPeIFxQgSi3+6qjg1PnGFaGlW373g==" - }, - "node_modules/@types/mime": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/@types/mime/-/mime-1.3.5.tgz", - "integrity": "sha512-/pyBZWSLD2n0dcHE3hq8s8ZvcETHtEuF+3E7XVt0Ig2nvsVQXdghHVcEkIWjy9A0wKfTn97a/PSDYohKIlnP/w==" - }, - "node_modules/@types/mime-types": { - "version": "2.1.4", - "resolved": "https://registry.npmjs.org/@types/mime-types/-/mime-types-2.1.4.tgz", - "integrity": "sha512-lfU4b34HOri+kAY5UheuFMWPDOI+OPceBSHZKp69gEyTL/mmJ4cnU6Y/rlme3UL3GyOn6Y42hyIEw0/q8sWx5w==", - "dev": true - }, - "node_modules/@types/node": { - "version": "16.18.6", - "resolved": "https://registry.npmjs.org/@types/node/-/node-16.18.6.tgz", - "integrity": "sha512-vmYJF0REqDyyU0gviezF/KHq/fYaUbFhkcNbQCuPGFQj6VTbXuHZoxs/Y7mutWe73C8AC6l9fFu8mSYiBAqkGA==" - }, - "node_modules/@types/node-fetch": { - "version": "2.6.2", - "resolved": "https://registry.npmjs.org/@types/node-fetch/-/node-fetch-2.6.2.tgz", - "integrity": "sha512-DHqhlq5jeESLy19TYhLakJ07kNumXWjcDdxXsLUMJZ6ue8VZJj4kLPQVE/2mdHh3xZziNF1xppu5lwmS53HR+A==", - "dependencies": { - "@types/node": "*", - "form-data": "^3.0.0" - } - }, - "node_modules/@types/qs": { - "version": "6.9.10", - "resolved": "https://registry.npmjs.org/@types/qs/-/qs-6.9.10.tgz", - "integrity": 
"sha512-3Gnx08Ns1sEoCrWssEgTSJs/rsT2vhGP+Ja9cnnk9k4ALxinORlQneLXFeFKOTJMOeZUFD1s7w+w2AphTpvzZw==" - }, - "node_modules/@types/range-parser": { - "version": "1.2.7", - "resolved": "https://registry.npmjs.org/@types/range-parser/-/range-parser-1.2.7.tgz", - "integrity": "sha512-hKormJbkJqzQGhziax5PItDUTMAM9uE2XXQmM37dyd4hVM+5aVl7oVxMVUiVQn2oCQFN/LKCZdvSM0pFRqbSmQ==" - }, - "node_modules/@types/send": { - "version": "0.17.4", - "resolved": "https://registry.npmjs.org/@types/send/-/send-0.17.4.tgz", - "integrity": "sha512-x2EM6TJOybec7c52BX0ZspPodMsQUd5L6PRwOunVyVUhXiBSKf3AezDL8Dgvgt5o0UfKNfuA0eMLr2wLT4AiBA==", - "dependencies": { - "@types/mime": "^1", - "@types/node": "*" - } - }, - "node_modules/@types/serve-static": { - "version": "1.15.5", - "resolved": "https://registry.npmjs.org/@types/serve-static/-/serve-static-1.15.5.tgz", - "integrity": "sha512-PDRk21MnK70hja/YF8AHfC7yIsiQHn1rcXx7ijCFBX/k+XQJhQT/gw3xekXKJvx+5SXaMMS8oqQy09Mzvz2TuQ==", - "dependencies": { - "@types/http-errors": "*", - "@types/mime": "*", - "@types/node": "*" - } - }, - "node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/ansi-regex?sponsor=1" - } - }, - "node_modules/asn1js": { - "version": "3.0.5", - "resolved": "https://registry.npmjs.org/asn1js/-/asn1js-3.0.5.tgz", - "integrity": "sha512-FVnvrKJwpt9LP2lAMl8qZswRNm3T4q9CON+bxldk2iwk3FFpuwhx2FfinyitizWHsVYyaY+y5JzDR0rCMV5yTQ==", - "dependencies": { - "pvtsutils": "^1.3.2", - "pvutils": "^1.1.3", - "tslib": "^2.4.0" - }, - "engines": { - "node": ">=12.0.0" - } - }, - "node_modules/asynckit": { - "version": "0.4.0", - "resolved": "https://registry.npmjs.org/asynckit/-/asynckit-0.4.0.tgz", - "integrity": "sha512-Oei9OH4tRh0YqU3GxhX79dM/mwVgvbZJaSNaRk+bshkj0S5cfHcgYakreBjrHwatXKbz+IoIdYLxrKim2MjW0Q==" - }, - "node_modules/base64-js": { - "version": "1.5.1", - "resolved": "https://registry.npmjs.org/base64-js/-/base64-js-1.5.1.tgz", - "integrity": "sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/bindings": { - "version": "1.5.0", - "resolved": "https://registry.npmjs.org/bindings/-/bindings-1.5.0.tgz", - "integrity": "sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ==", - "dependencies": { - "file-uri-to-path": "1.0.0" - } - }, - "node_modules/bl": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/bl/-/bl-5.1.0.tgz", - "integrity": "sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==", - "dependencies": { - "buffer": "^6.0.3", - "inherits": "^2.0.4", - "readable-stream": "^3.4.0" - } - }, - "node_modules/buffer": { - "version": "6.0.3", - "resolved": "https://registry.npmjs.org/buffer/-/buffer-6.0.3.tgz", - "integrity": "sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": 
"consulting", - "url": "https://feross.org/support" - } - ], - "dependencies": { - "base64-js": "^1.3.1", - "ieee754": "^1.2.1" - } - }, - "node_modules/bun": { - "version": "1.0.12", - "resolved": "https://registry.npmjs.org/bun/-/bun-1.0.12.tgz", - "integrity": "sha512-I0CAJJ0HQcu+hdid1jPpRuG1qAyiToZD2eJ0jOX9FLPvhyQQcul6DjRAlW+N1gk9brovK82sba4GvEQxVdCyUA==", - "cpu": [ - "arm64", - "x64" - ], - "hasInstallScript": true, - "os": [ - "darwin", - "linux" - ], - "bin": { - "bun": "bin/bun", - "bunx": "bin/bun" - }, - "optionalDependencies": { - "@oven/bun-darwin-aarch64": "1.0.12", - "@oven/bun-darwin-x64": "1.0.12", - "@oven/bun-darwin-x64-baseline": "1.0.12", - "@oven/bun-linux-aarch64": "1.0.12", - "@oven/bun-linux-x64": "1.0.12", - "@oven/bun-linux-x64-baseline": "1.0.12" - } - }, - "node_modules/camelcase": { - "version": "5.3.1", - "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", - "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/camelcase-keys": { - "version": "6.2.2", - "resolved": "https://registry.npmjs.org/camelcase-keys/-/camelcase-keys-6.2.2.tgz", - "integrity": "sha512-YrwaA0vEKazPBkn0ipTiMpSajYDSe+KjQfrjhcBMxJt/znbvlHd8Pw/Vamaz5EB4Wfhs3SUR3Z9mwRu/P3s3Yg==", - "dependencies": { - "camelcase": "^5.3.1", - "map-obj": "^4.0.0", - "quick-lru": "^4.0.1" - }, - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/chalk": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/chalk/-/chalk-5.3.0.tgz", - "integrity": "sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==", - "engines": { - "node": "^12.17.0 || ^14.13 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/chalk/chalk?sponsor=1" - } - }, - "node_modules/cli-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-4.0.0.tgz", - "integrity": "sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==", - "dependencies": { - "restore-cursor": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/cli-spinners": { - "version": "2.9.1", - "resolved": "https://registry.npmjs.org/cli-spinners/-/cli-spinners-2.9.1.tgz", - "integrity": "sha512-jHgecW0pxkonBJdrKsqxgRX9AcG+u/5k0Q7WPDfi8AogLAdwxEkyYYNWwZ5GvVFoFx2uiY1eNcSK00fh+1+FyQ==", - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/combined-stream": { - "version": "1.0.8", - "resolved": "https://registry.npmjs.org/combined-stream/-/combined-stream-1.0.8.tgz", - "integrity": "sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg==", - "dependencies": { - "delayed-stream": "~1.0.0" - }, - "engines": { - "node": ">= 0.8" - } - }, - "node_modules/cookie": { - "version": "0.5.0", - "resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz", - "integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/csv-parser": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/csv-parser/-/csv-parser-3.0.0.tgz", - "integrity": 
"sha512-s6OYSXAK3IdKqYO33y09jhypG/bSDHPuyCme/IdEHfWpLf/jKcpitVFyOC6UemgGk8v7Q5u2XE0vvwmanxhGlQ==", - "dependencies": { - "minimist": "^1.2.0" - }, - "bin": { - "csv-parser": "bin/csv-parser" - }, - "engines": { - "node": ">= 10" - } - }, - "node_modules/deepmerge": { - "version": "4.2.2", - "resolved": "https://registry.npmjs.org/deepmerge/-/deepmerge-4.2.2.tgz", - "integrity": "sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg==", - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/delayed-stream": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/delayed-stream/-/delayed-stream-1.0.0.tgz", - "integrity": "sha512-ZySD7Nf91aLB0RxL4KGrKHBXl7Eds1DAmEdcoVawXnLD7SDhpNgtuII2aAkg7a7QS41jxPSZ17p4VdGnMHk3MQ==", - "engines": { - "node": ">=0.4.0" - } - }, - "node_modules/dot-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/dot-case/-/dot-case-3.0.4.tgz", - "integrity": "sha512-Kv5nKlh6yRrdrGvxeJ2e5y2eRUpkUosIW4A2AS38zwSz27zu7ufDwQPi5Jhs3XAlGNetl3bmnGhQsMtkKJnj3w==", - "dependencies": { - "no-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/dotenv": { - "version": "16.3.1", - "resolved": "https://registry.npmjs.org/dotenv/-/dotenv-16.3.1.tgz", - "integrity": "sha512-IPzF4w4/Rd94bA9imS68tZBaYyBWSCE47V1RGuMrB94iyTOIEwRmVL2x/4An+6mETpLrKJ5hQkB8W4kFAadeIQ==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/motdotla/dotenv?sponsor=1" - } - }, - "node_modules/eastasianwidth": { - "version": "0.2.0", - "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", - "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==" - }, - "node_modules/emoji-regex": { - "version": "10.3.0", - "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-10.3.0.tgz", - "integrity": "sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==" - }, - "node_modules/file-uri-to-path": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz", - "integrity": "sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw==" - }, - "node_modules/form-data": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/form-data/-/form-data-3.0.1.tgz", - "integrity": "sha512-RHkBKtLWUVwd7SqRIvCZMEvAMoGUp0XU+seQiZejj0COz3RI3hWP4sCv3gZWWLjJTd7rGwcsF5eKZGii0r/hbg==", - "dependencies": { - "asynckit": "^0.4.0", - "combined-stream": "^1.0.8", - "mime-types": "^2.1.12" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/glob-to-regexp": { - "version": "0.4.1", - "resolved": "https://registry.npmjs.org/glob-to-regexp/-/glob-to-regexp-0.4.1.tgz", - "integrity": "sha512-lkX1HJXwyMcprw/5YUZc2s7DrpAiHB21/V+E1rHUrVNokkvB6bqMzT0VfV6/86ZNabt1k14YOIaT7nDvOX3Iiw==" - }, - "node_modules/ieee754": { - "version": "1.2.1", - "resolved": "https://registry.npmjs.org/ieee754/-/ieee754-1.2.1.tgz", - "integrity": "sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": 
"sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "node_modules/is-interactive": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is-interactive/-/is-interactive-2.0.0.tgz", - "integrity": "sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/is-unicode-supported": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/is-unicode-supported/-/is-unicode-supported-1.3.0.tgz", - "integrity": "sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/js-cookie": { - "version": "3.0.1", - "resolved": "https://registry.npmjs.org/js-cookie/-/js-cookie-3.0.1.tgz", - "integrity": "sha512-+0rgsUXZu4ncpPxRL+lNEptWMOWl9etvPHc/koSRp6MPwpRYAhmk0dUG00J4bxVV3r9uUzfo24wW0knS07SKSw==", - "engines": { - "node": ">=12" - } - }, - "node_modules/js-tokens": { - "version": "4.0.0", - "license": "MIT", - "peer": true - }, - "node_modules/log-symbols": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/log-symbols/-/log-symbols-5.1.0.tgz", - "integrity": "sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==", - "dependencies": { - "chalk": "^5.0.0", - "is-unicode-supported": "^1.1.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/loose-envify": { - "version": "1.4.0", - "license": "MIT", - "peer": true, - "dependencies": { - "js-tokens": "^3.0.0 || ^4.0.0" - }, - "bin": { - "loose-envify": "cli.js" - } - }, - "node_modules/lower-case": { - "version": "2.0.2", - "resolved": "https://registry.npmjs.org/lower-case/-/lower-case-2.0.2.tgz", - "integrity": "sha512-7fm3l3NAF9WfN6W3JOmf5drwpVqX78JtoGJ3A6W0a6ZnldM41w2fV5D490psKFTpMds8TJse/eHLFFsNHHjHgg==", - "dependencies": { - "tslib": "^2.0.3" - } - }, - "node_modules/map-obj": { - "version": "4.3.0", - "resolved": "https://registry.npmjs.org/map-obj/-/map-obj-4.3.0.tgz", - "integrity": "sha512-hdN1wVrZbb29eBGiGjJbeP8JbKjq1urkHJ/LIP/NY48MZ1QVXUsQBV1G1zvYFHn1XE06cwjBsOI2K3Ulnj1YXQ==", - "engines": { - "node": ">=8" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/mime-db": { - "version": "1.52.0", - "resolved": "https://registry.npmjs.org/mime-db/-/mime-db-1.52.0.tgz", - "integrity": "sha512-sPU4uV7dYlvtWJxwwxHD0PuihVNiE7TyAbQ5SWxDCB9mUYvOgroQOwYQQOKPJ8CIbE+1ETVlOoK1UC2nU3gYvg==", - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mime-types": { - "version": "2.1.35", - "resolved": "https://registry.npmjs.org/mime-types/-/mime-types-2.1.35.tgz", - "integrity": "sha512-ZDY+bPm5zTTF+YpCrAU9nK0UgICYPT0QtT1NZWFv4s++TNkcgVaT0g6+4R2uI4MjQjzysHB1zxuWL50hzaeXiw==", - "dependencies": { - "mime-db": "1.52.0" - }, - "engines": { - "node": ">= 0.6" - } - }, - "node_modules/mimic-fn": { - "version": "2.1.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-2.1.0.tgz", - "integrity": "sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==", - "engines": { - "node": ">=6" - } - }, - "node_modules/minimist": { - "version": "1.2.8", - "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", - "integrity": 
"sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", - "funding": { - "url": "https://github.com/sponsors/ljharb" - } - }, - "node_modules/nan": { - "version": "2.18.0", - "resolved": "https://registry.npmjs.org/nan/-/nan-2.18.0.tgz", - "integrity": "sha512-W7tfG7vMOGtD30sHoZSSc/JVYiyDPEyQVso/Zz+/uQd0B0L46gtC+pHha5FFMRpil6fm/AoEcRWyOVi4+E/f8w==" - }, - "node_modules/no-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/no-case/-/no-case-3.0.4.tgz", - "integrity": "sha512-fgAN3jGAh+RoxUGZHTSOLJIqUc2wmoBwGR4tbpNAKmmovFoWq0OdRkb0VkldReO2a2iBT/OEulG9XSUc10r3zg==", - "dependencies": { - "lower-case": "^2.0.2", - "tslib": "^2.0.3" - } - }, - "node_modules/node-fetch-native": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/node-fetch-native/-/node-fetch-native-1.0.1.tgz", - "integrity": "sha512-VzW+TAk2wE4X9maiKMlT+GsPU4OMmR1U9CrHSmd3DFLn2IcZ9VJ6M6BBugGfYUnPCLSYxXdZy17M0BEJyhUTwg==" - }, - "node_modules/onetime": { - "version": "5.1.2", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.2.tgz", - "integrity": "sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==", - "dependencies": { - "mimic-fn": "^2.1.0" - }, - "engines": { - "node": ">=6" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/ora": { - "version": "7.0.1", - "resolved": "https://registry.npmjs.org/ora/-/ora-7.0.1.tgz", - "integrity": "sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==", - "dependencies": { - "chalk": "^5.3.0", - "cli-cursor": "^4.0.0", - "cli-spinners": "^2.9.0", - "is-interactive": "^2.0.0", - "is-unicode-supported": "^1.3.0", - "log-symbols": "^5.1.0", - "stdin-discarder": "^0.1.0", - "string-width": "^6.1.0", - "strip-ansi": "^7.1.0" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/picocolors": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.0.tgz", - "integrity": "sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==" - }, - "node_modules/pvtsutils": { - "version": "1.3.5", - "resolved": "https://registry.npmjs.org/pvtsutils/-/pvtsutils-1.3.5.tgz", - "integrity": "sha512-ARvb14YB9Nm2Xi6nBq1ZX6dAM0FsJnuk+31aUp4TrcZEdKUlSqOqsxJHUPJDNE3qiIp+iUPEIeR6Je/tgV7zsA==", - "dependencies": { - "tslib": "^2.6.1" - } - }, - "node_modules/pvutils": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/pvutils/-/pvutils-1.1.3.tgz", - "integrity": "sha512-pMpnA0qRdFp32b1sJl1wOJNxZLQ2cbQx+k6tjNtZ8CpvVhNqEPRgivZ2WOUev2YMajecdH7ctUPDvEe87nariQ==", - "engines": { - "node": ">=6.0.0" - } - }, - "node_modules/quick-lru": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/quick-lru/-/quick-lru-4.0.1.tgz", - "integrity": "sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g==", - "engines": { - "node": ">=8" - } - }, - "node_modules/react": { - "version": "18.2.0", - "license": "MIT", - "peer": true, - "dependencies": { - "loose-envify": "^1.1.0" - }, - "engines": { - "node": ">=0.10.0" - } - }, - "node_modules/readable-stream": { - "version": "3.6.2", - "resolved": "https://registry.npmjs.org/readable-stream/-/readable-stream-3.6.2.tgz", - "integrity": "sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==", - "dependencies": { 
- "inherits": "^2.0.3", - "string_decoder": "^1.1.1", - "util-deprecate": "^1.0.1" - }, - "engines": { - "node": ">= 6" - } - }, - "node_modules/restore-cursor": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-4.0.0.tgz", - "integrity": "sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==", - "dependencies": { - "onetime": "^5.1.0", - "signal-exit": "^3.0.2" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/safe-buffer": { - "version": "5.2.1", - "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.2.1.tgz", - "integrity": "sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==", - "funding": [ - { - "type": "github", - "url": "https://github.com/sponsors/feross" - }, - { - "type": "patreon", - "url": "https://www.patreon.com/feross" - }, - { - "type": "consulting", - "url": "https://feross.org/support" - } - ] - }, - "node_modules/segfault-handler": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/segfault-handler/-/segfault-handler-1.3.0.tgz", - "integrity": "sha512-p7kVHo+4uoYkr0jmIiTBthwV5L2qmWtben/KDunDZ834mbos+tY+iO0//HpAJpOFSQZZ+wxKWuRo4DxV02B7Lg==", - "hasInstallScript": true, - "dependencies": { - "bindings": "^1.2.1", - "nan": "^2.14.0" - } - }, - "node_modules/signal-exit": { - "version": "3.0.7", - "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", - "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" - }, - "node_modules/sisteransi": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/sisteransi/-/sisteransi-1.0.5.tgz", - "integrity": "sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg==" - }, - "node_modules/snake-case": { - "version": "3.0.4", - "resolved": "https://registry.npmjs.org/snake-case/-/snake-case-3.0.4.tgz", - "integrity": "sha512-LAOh4z89bGQvl9pFfNF8V146i7o7/CqFPbqzYgP+yYzDIDeS9HaNFtXABamRW+AQzEVODcvE79ljJ+8a9YSdMg==", - "dependencies": { - "dot-case": "^3.0.4", - "tslib": "^2.0.3" - } - }, - "node_modules/snakecase-keys": { - "version": "3.2.1", - "resolved": "https://registry.npmjs.org/snakecase-keys/-/snakecase-keys-3.2.1.tgz", - "integrity": "sha512-CjU5pyRfwOtaOITYv5C8DzpZ8XA/ieRsDpr93HI2r6e3YInC6moZpSQbmUtg8cTk58tq2x3jcG2gv+p1IZGmMA==", - "dependencies": { - "map-obj": "^4.1.0", - "to-snake-case": "^1.0.0" - }, - "engines": { - "node": ">=8" - } - }, - "node_modules/stdin-discarder": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/stdin-discarder/-/stdin-discarder-0.1.0.tgz", - "integrity": "sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==", - "dependencies": { - "bl": "^5.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/string_decoder": { - "version": "1.3.0", - "resolved": "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz", - "integrity": "sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==", - "dependencies": { - "safe-buffer": "~5.2.0" - } - }, - "node_modules/string-width": { - "version": "6.1.0", - "resolved": "https://registry.npmjs.org/string-width/-/string-width-6.1.0.tgz", - "integrity": 
"sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==", - "dependencies": { - "eastasianwidth": "^0.2.0", - "emoji-regex": "^10.2.1", - "strip-ansi": "^7.0.1" - }, - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/strip-ansi": { - "version": "7.1.0", - "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", - "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", - "dependencies": { - "ansi-regex": "^6.0.1" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/chalk/strip-ansi?sponsor=1" - } - }, - "node_modules/swr": { - "version": "2.2.0", - "resolved": "https://registry.npmjs.org/swr/-/swr-2.2.0.tgz", - "integrity": "sha512-AjqHOv2lAhkuUdIiBu9xbuettzAzWXmCEcLONNKJRba87WAefz8Ca9d6ds/SzrPc235n1IxWYdhJ2zF3MNUaoQ==", - "dependencies": { - "use-sync-external-store": "^1.2.0" - }, - "peerDependencies": { - "react": "^16.11.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/to-no-case": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/to-no-case/-/to-no-case-1.0.2.tgz", - "integrity": "sha512-Z3g735FxuZY8rodxV4gH7LxClE4H0hTIyHNIHdk+vpQxjLm0cwnKXq/OFVZ76SOQmto7txVcwSCwkU5kqp+FKg==" - }, - "node_modules/to-snake-case": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/to-snake-case/-/to-snake-case-1.0.0.tgz", - "integrity": "sha512-joRpzBAk1Bhi2eGEYBjukEWHOe/IvclOkiJl3DtA91jV6NwQ3MwXA4FHYeqk8BNp/D8bmi9tcNbRu/SozP0jbQ==", - "dependencies": { - "to-space-case": "^1.0.0" - } - }, - "node_modules/to-space-case": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/to-space-case/-/to-space-case-1.0.0.tgz", - "integrity": "sha512-rLdvwXZ39VOn1IxGL3V6ZstoTbwLRckQmn/U8ZDLuWwIXNpuZDhQ3AiRUlhTbOXFVE9C+dR51wM0CBDhk31VcA==", - "dependencies": { - "to-no-case": "^1.0.0" - } - }, - "node_modules/tslib": { - "version": "2.6.2", - "license": "0BSD" - }, - "node_modules/use-sync-external-store": { - "version": "1.2.0", - "resolved": "https://registry.npmjs.org/use-sync-external-store/-/use-sync-external-store-1.2.0.tgz", - "integrity": "sha512-eEgnFxGQ1Ife9bzYs6VLi8/4X6CObHMw9Qr9tPY43iKwsPw8xE8+EFsf/2cFZ5S3esXgpWgtSCtLNS41F+sKPA==", - "peerDependencies": { - "react": "^16.8.0 || ^17.0.0 || ^18.0.0" - } - }, - "node_modules/util-deprecate": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/util-deprecate/-/util-deprecate-1.0.2.tgz", - "integrity": "sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==" - }, - "node_modules/webcrypto-core": { - "version": "1.7.7", - "resolved": "https://registry.npmjs.org/webcrypto-core/-/webcrypto-core-1.7.7.tgz", - "integrity": "sha512-7FjigXNsBfopEj+5DV2nhNpfic2vumtjjgPmeDKk45z+MJwXKKfhPB7118Pfzrmh4jqOMST6Ch37iPAHoImg5g==", - "dependencies": { - "@peculiar/asn1-schema": "^2.3.6", - "@peculiar/json-schema": "^1.1.12", - "asn1js": "^3.0.1", - "pvtsutils": "^1.3.2", - "tslib": "^2.4.0" - } - }, - "node_modules/zod": { - "version": "3.22.4", - "resolved": "https://registry.npmjs.org/zod/-/zod-3.22.4.tgz", - "integrity": "sha512-iC+8Io04lddc+mVqQ9AZ7OQ2MrUKGN+oIQyq1vemgt46jwCwLfhq7/pwnBnNXXXZb8VTVLKwp9EDkx+ryxIWmg==", - "funding": { - "url": "https://github.com/sponsors/colinhacks" - } - } - } -} diff --git a/package.json b/package.json index 6d4f8af..117146d 100644 --- a/package.json +++ b/package.json @@ -11,14 +11,14 @@ }, "dependencies": { 
"@clack/prompts": "^0.7.0", - "@clerk/clerk-sdk-node": "^4.12.21", + "@clerk/backend": "^0.38.3", + "@clerk/clerk-sdk-node": "^4.13.11", + "@clerk/types": "^3.62.1", "bun": "^1.0.12", "csv-parser": "^3.0.0", "dotenv": "^16.3.1", "mime-types": "^2.1.35", - "ora": "^7.0.1", "picocolors": "^1.0.0", - "segfault-handler": "^1.3.0", "zod": "^3.22.4" }, "devDependencies": { diff --git a/src/cli.ts b/src/cli.ts index 16ff57e..df22d7f 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,8 +1,9 @@ import * as p from '@clack/prompts' import color from 'picocolors' -import { checkIfFileExists, createHandlerOptions, getFileType } from './functions' -// +import { checkIfFileExists, createHandlerOptions, getDateTimeStamp, getFileType } from './functions' +import { infoLogger } from './logger' + export const runCLI = async () => { p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`) @@ -62,7 +63,7 @@ export const runCLI = async () => { ) if (args.begin) { - // console.log('Migration started') + infoLogger("Migration process started", getDateTimeStamp()) } return args diff --git a/src/functions.ts b/src/functions.ts index 4065dc6..641fc67 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -5,7 +5,7 @@ import mime from 'mime-types' import csvParser from 'csv-parser'; import * as z from "zod"; import * as p from '@clack/prompts' -import { logger } from './logger'; +import { validationLogger } from './logger'; const s = p.spinner() @@ -108,18 +108,22 @@ export const createHandlerOptions = () => { export const transformKeys = (data: Record, keys: any): Record => { const transformedData: Record = {}; - for (const key in data) { - if (data.hasOwnProperty(key)) { - let transformedKey = key; - if (keys.transformer[key]) transformedKey = keys.transformer[key] - - transformedData[transformedKey] = data[key]; + // for (const key in data) { + for (const [key, value] of Object.entries(data)) { + if (value !== "" && value !== '"{}"') { + if (data.hasOwnProperty(key)) { + let transformedKey = key; + if (keys.transformer[key]) transformedKey = keys.transformer[key] + + transformedData[transformedKey] = data[key]; + } } } return transformedData; }; + export const loadUsersFromFile = async (file: string, key: string): Promise => { const dateTime = getDateTimeStamp() s.start() @@ -153,8 +157,9 @@ export const loadUsersFromFile = async (file: string, key: string): Promise { +export const importUsers = async (users: User[]) => { const dateTime = getDateTimeStamp() s.start() diff --git a/src/logger.ts b/src/logger.ts index 15a4736..b0420fa 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -1,62 +1,107 @@ +import { ClerkAPIError } from '@clerk/types'; import fs from 'fs'; import path from 'path' +type ErrorPayload = { + userId: string; + status: string; + errors: ClerkAPIError[] +} + +type ValidationErrorPayload = { + error: string; + path: (string | number)[]; + row: number; +} + +type ErrorLog = { + type: string; + userId: string; + status: string; + error: string | undefined +} + const confirmOrCreateFolder = (path: string) => { - console.log('creating', path) try { if (!fs.existsSync(path)) { fs.mkdirSync(path); } } catch (err) { - console.error(err); + console.error( + '❌ Error creating directory for logs:', + err + ); } - } -export const logger = (type: "info" | "error" | "validator", payload: any, dateTime: string): void => { - - confirmOrCreateFolder(path.join(__dirname, '..', 'logs')) - console.log(type) - - if (type === "info") { - const infoPath = path.join(__dirname, '..', 'logs', 'info') +const 
logger = (payload: any, dateTime: string) => { + const logPath = path.join(__dirname, '..', 'logs') + confirmOrCreateFolder(logPath) - confirmOrCreateFolder(infoPath) + try { + if (!fs.existsSync(`${logPath}/${dateTime}.json`)) { + ; fs.writeFileSync( + `${logPath}/${dateTime}.json`, + JSON.stringify(payload, null, 2) + ); + } else { + const log = JSON.parse( + fs.readFileSync(`${logPath}/${dateTime}.json`, "utf-8") + ); + log.push(payload) + + fs.writeFileSync( + `${logPath}/${dateTime}.json`, + JSON.stringify(log, null, 2) + ); + } - fs.appendFileSync( - `${infoPath}/${dateTime}.json`, - `\n${JSON.stringify(payload, null, 2)}` + } catch (err) { + console.error( + '❌ Error creating directory for logs:', + err ); } +} - if (type === "error") { - const errorsPath = path.join(__dirname, '..', 'logs', 'errors') - console.log(errorsPath) - confirmOrCreateFolder(errorsPath) +export const infoLogger = (message: string, dateTime: string): void => { + confirmOrCreateFolder(path.join(__dirname, '..', 'logs')) + logger([{ message: message }], dateTime) +} +export const errorLogger = (payload: ErrorPayload, dateTime: string): void => { - fs.appendFileSync( - `${errorsPath}/${dateTime}.json`, - `\n${JSON.stringify(payload, null, 2)}` - ); + const errorsPath = path.join(__dirname, '..', 'logs') + confirmOrCreateFolder(errorsPath) - } + const errors: ErrorLog[] = [] + for (const err of payload.errors) { + const errorToLog = { + type: "User Creation Error", + userId: payload.userId, + status: payload.status, + error: err.longMessage - if (type === "validator") { - const validatorPath = path.join(__dirname, '..', 'logs', 'validator') - confirmOrCreateFolder(validatorPath) + } + errors.push((errorToLog)) + } + logger(errors, dateTime) +} +export const validationLogger = (payload: ValidationErrorPayload, dateTime: string): void => { + const errorsPath = path.join(__dirname, '..', 'logs') + confirmOrCreateFolder(errorsPath) - fs.appendFileSync( - `${validatorPath}/${dateTime}.json`, - `\n${JSON.stringify(payload, null, 2)}` - ); + const error = { + type: "Validation Error", + row: payload.row, + error: payload.error, + path: payload.path } - - + logger(error, dateTime) } From e959bc0d9c97c602957947bf5ebfaf6e585e1d2d Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 18:04:27 -0500 Subject: [PATCH 14/31] (wip) Added eslint --- .eslintrc.js | 33 +++++++++++++++++++++++++++++++++ package.json | 8 +++++++- src/envs-constants.ts | 3 --- src/logger.ts | 2 +- 4 files changed, 41 insertions(+), 5 deletions(-) create mode 100644 .eslintrc.js diff --git a/.eslintrc.js b/.eslintrc.js new file mode 100644 index 0000000..96ec2a0 --- /dev/null +++ b/.eslintrc.js @@ -0,0 +1,33 @@ +module.exports = { + "env": { + "browser": true, + "es2021": true + }, + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/recommended" + ], + "overrides": [ + { + "env": { + "node": true + }, + "files": [ + ".eslintrc.{js,cjs}" + ], + "parserOptions": { + "sourceType": "script" + } + } + ], + "parser": "@typescript-eslint/parser", + "parserOptions": { + "ecmaVersion": "latest", + "sourceType": "module" + }, + "plugins": [ + "@typescript-eslint" + ], + "rules": { + } +} diff --git a/package.json b/package.json index 117146d..4367108 100644 --- a/package.json +++ b/package.json @@ -8,6 +8,9 @@ "license": "ISC", "scripts": { "start": "bun index.ts" + "start": "bun index.ts", + "lint": "eslint . --config .eslintrc.js", + "lint:fix": "eslint . 
--fix --config .eslintrc.js" }, "dependencies": { "@clack/prompts": "^0.7.0", @@ -22,6 +25,9 @@ "zod": "^3.22.4" }, "devDependencies": { - "@types/mime-types": "^2.1.4" + "@types/mime-types": "^2.1.4", + "@typescript-eslint/eslint-plugin": "^7.1.0", + "@typescript-eslint/parser": "^7.1.0", + "eslint": "^8.57.0", } } diff --git a/src/envs-constants.ts b/src/envs-constants.ts index c17edcd..56eed97 100644 --- a/src/envs-constants.ts +++ b/src/envs-constants.ts @@ -1,10 +1,7 @@ import { TypeOf, z } from 'zod' -import * as fs from 'fs'; -import * as path from 'path'; import { config } from "dotenv"; config(); -// require('dotenv').config() // TODO: Revisit if we need this. Left to easily implement export const withDevDefault = ( diff --git a/src/logger.ts b/src/logger.ts index b0420fa..432adb1 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -41,7 +41,7 @@ const logger = (payload: any, dateTime: string) => { try { if (!fs.existsSync(`${logPath}/${dateTime}.json`)) { - ; fs.writeFileSync( + fs.writeFileSync( `${logPath}/${dateTime}.json`, JSON.stringify(payload, null, 2) ); From 34f376859660fca0e3bf6341e38b2e655bdadcf5 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 18:18:32 -0500 Subject: [PATCH 15/31] (wip) --- .prettierignore | 6 ++++++ package.json | 4 +++- 2 files changed, 9 insertions(+), 1 deletion(-) create mode 100644 .prettierignore diff --git a/.prettierignore b/.prettierignore new file mode 100644 index 0000000..9d81cca --- /dev/null +++ b/.prettierignore @@ -0,0 +1,6 @@ +/logs/** +/samples/** +**.json +**.csv + + diff --git a/package.json b/package.json index 4367108..e79478c 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,6 @@ "keywords": [], "license": "ISC", "scripts": { - "start": "bun index.ts" "start": "bun index.ts", "lint": "eslint . --config .eslintrc.js", "lint:fix": "eslint . 
--fix --config .eslintrc.js" @@ -29,5 +28,8 @@ "@typescript-eslint/eslint-plugin": "^7.1.0", "@typescript-eslint/parser": "^7.1.0", "eslint": "^8.57.0", + "eslint-config-prettier": "^9.1.0", + "eslint-plugin-prettier": "^5.1.3", + "prettier": "^3.2.5" } } From 28b852e2bbf5fa65375b7103b1f90bff3bfccded Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 22:07:16 -0500 Subject: [PATCH 16/31] (wip) Added prettier --- .eslintrc.js | 54 ++--- .prettierignore | 4 +- .prettierrc.js | 12 ++ LICENSE.Apache-2.0.md | 366 ++++++++++++++++---------------- README.md | 48 +++-- index.ts | 20 +- package.json | 4 +- src/cli.ts | 72 ++++--- src/envs-constants.ts | 21 +- src/functions.ts | 132 ++++++------ src/handlers/authjsHandler.ts | 12 +- src/handlers/clerkHandler.ts | 13 +- src/handlers/supabaseHandler.ts | 10 +- src/import-users.ts | 89 ++++---- src/logger.ts | 89 ++++---- src/spinner.ts | 6 +- 16 files changed, 477 insertions(+), 475 deletions(-) create mode 100644 .prettierrc.js diff --git a/.eslintrc.js b/.eslintrc.js index 96ec2a0..192b338 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -1,33 +1,25 @@ module.exports = { - "env": { - "browser": true, - "es2021": true + env: { + browser: true, + es2021: true, + }, + extends: ["eslint:recommended", "plugin:@typescript-eslint/recommended"], + overrides: [ + { + env: { + node: true, + }, + files: [".eslintrc.{js,cjs}"], + parserOptions: { + sourceType: "script", + }, }, - "extends": [ - "eslint:recommended", - "plugin:@typescript-eslint/recommended" - ], - "overrides": [ - { - "env": { - "node": true - }, - "files": [ - ".eslintrc.{js,cjs}" - ], - "parserOptions": { - "sourceType": "script" - } - } - ], - "parser": "@typescript-eslint/parser", - "parserOptions": { - "ecmaVersion": "latest", - "sourceType": "module" - }, - "plugins": [ - "@typescript-eslint" - ], - "rules": { - } -} + ], + parser: "@typescript-eslint/parser", + parserOptions: { + ecmaVersion: "latest", + sourceType: "module", + }, + plugins: ["@typescript-eslint"], + rules: {}, +}; diff --git a/.prettierignore b/.prettierignore index 9d81cca..999a527 100644 --- a/.prettierignore +++ b/.prettierignore @@ -1,6 +1,6 @@ /logs/** /samples/** -**.json -**.csv +**/*.json +**/*.csv diff --git a/.prettierrc.js b/.prettierrc.js new file mode 100644 index 0000000..f651c0e --- /dev/null +++ b/.prettierrc.js @@ -0,0 +1,12 @@ +module.exports = { + prettier: { + trailingComma: "es5", + tabWidth: 2, + semi: false, + singleQuote: true, + printWidth: 80, + semi: true, + bracketSpacing: true, + arrowParans: "always", + }, +}; diff --git a/LICENSE.Apache-2.0.md b/LICENSE.Apache-2.0.md index 559cd29..db2b9a9 100644 --- a/LICENSE.Apache-2.0.md +++ b/LICENSE.Apache-2.0.md @@ -2,180 +2,180 @@ Version 2.0, January 2004 http://www.apache.org/licenses/ - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. 
For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. +TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + +1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + +2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + +3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + +4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + +5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + +6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + +7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + +8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + +9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + +END OF TERMS AND CONDITIONS + +APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "[]" @@ -186,16 +186,16 @@ same "printed page" as the copyright notice for easier identification within third-party archives. - Copyright 2023 Clerk Inc +Copyright 2023 Clerk Inc - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at +Licensed under the Apache License, Version 2.0 (the "License"); +you may not use this file except in compliance with the License. +You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +Unless required by applicable law or agreed to in writing, software +distributed under the License is distributed on an "AS IS" BASIS, +WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +See the License for the specific language governing permissions and +limitations under the License. diff --git a/README.md b/README.md index c1a4467..731cde7 100644 --- a/README.md +++ b/README.md @@ -17,20 +17,21 @@ npm install ``` ### Users.json file -Create a `users.json` file. This file should be populated with all the users that need to be imported. The users should pass this schema: +Create a `users.json` file. This file should be populated with all the users that need to be imported. 
The users should pass this schema: ```ts [ { - "userId": "string", - "email": "email", - "firstName": "string (optional)", - "lastName": "string (optional)", - "password": "string (optional)", - "passwordHasher": "argon2 | argon | bcrypt | md5 | pbkdf2_sha256 | pbkdf2_sha256_django | pbkdf2_sha1 | scrypt_firebase", - } -] + userId: "string", + email: "email", + firstName: "string (optional)", + lastName: "string (optional)", + password: "string (optional)", + passwordHasher: + "argon2 | argon | bcrypt | md5 | pbkdf2_sha256 | pbkdf2_sha256_django | pbkdf2_sha1 | scrypt_firebase", + }, +]; ``` The only required fields are `userId` and `email`. First and last names can be added if available. Clerk will also accept hashed password values along with the hashing algorithm used (the default is `bcrypt`). @@ -49,12 +50,12 @@ Here are a couple examples. "userId": "2", "email": "john@blurp.com", "password": "$2a$10$N9qo8uLOickgx2ZMRZoMyeIjZAgcfl7p92ldGxad68LJZdL17lhWy", - "passwordHasher": "bcrypt" // default value + "passwordHasher": "bcrypt" // default value } ] ``` -The samples/ folder contains some samples, including issues that will produce errors when running the import. +The samples/ folder contains some samples, including issues that will produce errors when running the import. ### Secret Key @@ -78,12 +79,12 @@ The script can be run on the same data multiple times, Clerk automatically uses The script can be configured through the following environment variables: -| Variable | Description | Default | -| -------- | ----------- | ------- | -| `CLERK_SECRET_KEY` | Your Clerk secret key | `undefined` | -| `DELAY_MS` | Delay between requests to respect rate limits | `1000` | -| `RETRY_DELAY_MS` | Delay when the rate limit is hit | `10000` | -| `OFFSET` | Offset to start migration (number of users to skip) | `0` | +| Variable | Description | Default | +| ------------------ | --------------------------------------------------- | ----------- | +| `CLERK_SECRET_KEY` | Your Clerk secret key | `undefined` | +| `DELAY_MS` | Delay between requests to respect rate limits | `1000` | +| `RETRY_DELAY_MS` | Delay when the rate limit is hit | `10000` | +| `OFFSET` | Offset to start migration (number of users to skip) | `0` | ## Handling the Foreign Key constraint @@ -93,21 +94,23 @@ If you were using a database, you will have data tied to your previous auth syst Our sessions allow for conditional expressions. This would allow you add a session claim that will return either the `externalId` (the previous id for your user) when it exists, or the `userId` from Clerk. This will result in your imported users returning their `externalId` while newer users will return the Clerk `userId`. -In your Dashboard, go to Sessions -> Edit. Add the following: +In your Dashboard, go to Sessions -> Edit. Add the following: ```json { - "userId": "{{user.external_id || user.id}}" + "userId": "{{user.external_id || user.id}}" } ``` You can now access this value using the following: -```ts + +```ts const { sessionClaims } = auth(); -console.log(sessionClaims.userId) +console.log(sessionClaims.userId); ``` -You can add the following for typescript: +You can add the following for typescript: + ```js // types/global.d.ts @@ -125,4 +128,3 @@ declare global { You could continue to generate unique ids for the database as done previously, and then store those in `externalId`. This way all users would have an `externalId` that would be used for DB interactions. 
You could add a column in your user table inside of your database called `ClerkId`. Use that column to store the userId from Clerk directly into your database. - diff --git a/index.ts b/index.ts index acd72df..bfaa90b 100755 --- a/index.ts +++ b/index.ts @@ -6,21 +6,25 @@ import { runCLI } from "./src/cli"; import { loadUsersFromFile } from "./src/functions"; import { importUsers } from "./src/import-users"; -if (env.CLERK_SECRET_KEY.split("_")[1] !== "live" && env.IMPORT_TO_DEV === false) { +if ( + env.CLERK_SECRET_KEY.split("_")[1] !== "live" && + env.IMPORT_TO_DEV === false +) { throw new Error( - "The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV_INSTANCE' in your .env to 'true'." + "The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV_INSTANCE' in your .env to 'true'.", ); } async function main() { - const args = await runCLI() + const args = await runCLI(); - const users = await loadUsersFromFile(args.file, args.key) + const users = await loadUsersFromFile(args.file, args.key); - const usersToImport = users.slice(parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET); - - importUsers(usersToImport) + const usersToImport = users.slice( + parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET, + ); + importUsers(usersToImport); } -main() +main(); diff --git a/package.json b/package.json index e79478c..80ce8be 100644 --- a/package.json +++ b/package.json @@ -9,7 +9,9 @@ "scripts": { "start": "bun index.ts", "lint": "eslint . --config .eslintrc.js", - "lint:fix": "eslint . --fix --config .eslintrc.js" + "lint:fix": "eslint . --fix --config .eslintrc.js", + "prettier": "prettier . --write", + "prettier:test": "prettier ." 
},
   "dependencies": {
     "@clack/prompts": "^0.7.0",
diff --git a/src/cli.ts b/src/cli.ts
index df22d7f..f0bf1be 100644
--- a/src/cli.ts
+++ b/src/cli.ts
@@ -1,72 +1,78 @@
-
-import * as p from '@clack/prompts'
-import color from 'picocolors'
-import { checkIfFileExists, createHandlerOptions, getDateTimeStamp, getFileType } from './functions'
-import { infoLogger } from './logger'
+import * as p from "@clack/prompts";
+import color from "picocolors";
+import {
+  checkIfFileExists,
+  createHandlerOptions,
+  getDateTimeStamp,
+  getFileType,
+} from "./functions";
+import { infoLogger } from "./logger";
 
 export const runCLI = async () => {
-  p.intro(`${color.bgCyan(color.black('Clerk User Migration Utility'))}`)
+  p.intro(`${color.bgCyan(color.black("Clerk User Migration Utility"))}`);
 
-  const options = createHandlerOptions()
+  const options = createHandlerOptions();
 
   const args = await p.group(
     {
       key: () =>
         p.select({
-          message: 'What platform are you migrating your users from?',
+          message: "What platform are you migrating your users from?",
           initialValue: options[0].value,
           maxItems: 1,
-          options: options
+          options: options,
         }),
       file: () =>
         p.text({
-          message: 'Specify the file to use for importing your users',
-          initialValue: 'users.json',
-          placeholder: 'users.json',
+          message: "Specify the file to use for importing your users",
+          initialValue: "users.json",
+          placeholder: "users.json",
           validate: (value) => {
             if (!checkIfFileExists(value)) {
-              return "That file does not exist. Please try again"
+              return "That file does not exist. Please try again";
             }
-            if (getFileType(value) !== 'text/csv' && getFileType(value) !== 'application/json') {
-              return 'Please supply a valid JSON or CSV file'
+            if (
+              getFileType(value) !== "text/csv" &&
+              getFileType(value) !== "application/json"
+            ) {
+              return "Please supply a valid JSON or CSV file";
             }
-          }
+          },
         }),
       instance: () =>
         p.select({
-          message: 'Are you importing your users into a production instance? Development instances are for testing and limited t0 500 users.',
-          initialValue: 'prod',
+          message:
+            "Are you importing your users into a production instance? Development instances are for testing and limited to 500 users.",
+          initialValue: "prod",
           maxItems: 1,
           options: [
-            { value: 'prod', label: 'Prodction' },
-            { value: 'dev', label: 'Developetion' }
-          ]
+            { value: "prod", label: "Production" },
+            { value: "dev", label: "Development" },
+          ],
         }),
       offset: () =>
         p.text({
-          message: 'Specify an offset to begin importing from.',
-          defaultValue: '0',
-          placeholder: '0'
+          message: "Specify an offset to begin importing from.",
+          defaultValue: "0",
+          placeholder: "0",
         }),
       begin: () =>
         p.confirm({
-          message: 'Begin Migration?',
+          message: "Begin Migration?",
           initialValue: true,
         }),
     },
     {
       onCancel: () => {
-        p.cancel('Migration cancelled.');
+        p.cancel("Migration cancelled.");
         process.exit(0);
       },
-    }
-  )
+    },
+  );
 
   if (args.begin) {
-    infoLogger("Migration process started", getDateTimeStamp())
+    infoLogger("Migration process started", getDateTimeStamp());
   }
 
-  return args
-
-}
-
+  return args;
+};
diff --git a/src/envs-constants.ts b/src/envs-constants.ts
index 56eed97..6983fff 100644
--- a/src/envs-constants.ts
+++ b/src/envs-constants.ts
@@ -1,5 +1,4 @@
-
-import { TypeOf, z } from 'zod'
+import { TypeOf, z } from "zod";
 import { config } from "dotenv";
 config();
 
@@ -7,26 +6,24 @@ config();
 export const withDevDefault = <T extends z.ZodTypeAny>(
   schema: T,
   val: TypeOf<T>,
-) => (process.env['NODE_ENV'] !== 'production' ? 
schema.default(val) : schema)
+) => (process.env["NODE_ENV"] !== "production" ? schema.default(val) : schema);
 
 const envSchema = z.object({
   CLERK_SECRET_KEY: z.string(),
   DELAY: z.coerce.number().optional().default(550),
   RETRY_DELAY_MS: z.coerce.number().optional().default(10000),
   OFFSET: z.coerce.number().optional().default(0),
-  IMPORT_TO_DEV: z.coerce.boolean().optional().default(false)
-})
+  IMPORT_TO_DEV: z.coerce.boolean().optional().default(false),
+});
 
-const parsed = envSchema.safeParse(process.env)
+const parsed = envSchema.safeParse(process.env);
 
 if (!parsed.success) {
   console.error(
-    '❌ Invalid environment variables:',
+    "❌ Invalid environment variables:",
     JSON.stringify(parsed.error.format(), null, 4),
-  )
-  process.exit(1)
+  );
+  process.exit(1);
 }
 
-export const env = parsed.data
-
-
+export const env = parsed.data;
diff --git a/src/functions.ts b/src/functions.ts
index 641fc67..83a73ab 100644
--- a/src/functions.ts
+++ b/src/functions.ts
@@ -1,13 +1,12 @@
-
-import fs from 'fs';
-import path from 'path'
-import mime from 'mime-types'
-import csvParser from 'csv-parser';
+import fs from "fs";
+import path from "path";
+import mime from "mime-types";
+import csvParser from "csv-parser";
 import * as z from "zod";
-import * as p from '@clack/prompts'
-import { validationLogger } from './logger';
+import * as p from "@clack/prompts";
+import { validationLogger } from "./logger";
 
-const s = p.spinner()
+const s = p.spinner();
 
 type Handler = {
   key: string;
@@ -16,20 +15,24 @@ type Handler = {
 };
 
 // Dynamically read what handlers are present and generate array for use in script
-const handlersDirectory = path.join(__dirname, '/handlers');
+const handlersDirectory = path.join(__dirname, "/handlers");
 export const handlers: Handler[] = [];
 const files = fs.readdirSync(handlersDirectory);
 
 files.forEach((file) => {
-  if (file.endsWith('.ts')) {
+  if (file.endsWith(".ts")) {
     const filePath = path.join(handlersDirectory, file);
     const handlerModule = require(filePath);
 
-    if (handlerModule.options && handlerModule.options.key && handlerModule.options.transformer) {
+    if (
+      handlerModule.options &&
+      handlerModule.options.key &&
+      handlerModule.options.transformer
+    ) {
       handlers.push({
         key: handlerModule.options.key,
-        label: handlerModule.options.label || '',
-        transformer: handlerModule.options.transformer
+        label: handlerModule.options.label || "",
+        transformer: handlerModule.options.transformer,
       });
     }
   }
@@ -58,39 +61,35 @@ export const userSchema = z.object({
 
 export type User = z.infer<typeof userSchema>;
 
-
 // utility function to create file path
 const createImportFilePath = (file: string) => {
-  return path.join(__dirname, '..', file)
-}
-
+  return path.join(__dirname, "..", file);
+};
 
 // make sure the file exists. 
CLI will error if it doesn't
 export const checkIfFileExists = (file: string) => {
   if (fs.existsSync(createImportFilePath(file))) {
-    return true
-  }
-  else {
-    return false
+    return true;
+  } else {
+    return false;
   }
-}
+};
 
 // get the file type so we can verify if this is a JSON or CSV
 export const getFileType = (file: string) => {
-  return mime.lookup(createImportFilePath(file))
-}
+  return mime.lookup(createImportFilePath(file));
+};
 
 export const getDateTimeStamp = () => {
   return new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss
-
-}
+};
 
 // emulate what Clack CLI expects for an option in a Select / MultiSelect
 export type OptionType = {
   value: string;
   label: string | undefined;
   hint?: string | undefined;
-}
+};
 
 // handlers is an array created from the files in /src/validators
 // generate an array of options for use in the CLI
@@ -98,22 +97,24 @@ export const createHandlerOptions = () => {
   const options: OptionType[] = [];
 
   for (const handler of handlers) {
-    options.push({ "value": handler.key, "label": handler.label })
+    options.push({ value: handler.key, label: handler.label });
   }
-  return options
-}
+  return options;
+};
 
 // transform incoming data to match default schema
 // TODO : Remove any -- not sure how to handle this
-export const transformKeys = (data: Record<string, any>, keys: any): Record<string, any> => {
-
+export const transformKeys = (
+  data: Record<string, any>,
+  keys: any,
+): Record<string, any> => {
   const transformedData: Record<string, any> = {};
   // for (const key in data) {
   for (const [key, value] of Object.entries(data)) {
     if (value !== "" && value !== '"{}"') {
       if (data.hasOwnProperty(key)) {
         let transformedKey = key;
-        if (keys.transformer[key]) transformedKey = keys.transformer[key]
+        if (keys.transformer[key]) transformedKey = keys.transformer[key];
 
         transformedData[transformedKey] = data[key];
       }
@@ -122,73 +123,76 @@ export const transformKeys = (data: Record<string, any>, keys: any): Record<string, any> => {
   return transformedData;
 };
 
-
-export const loadUsersFromFile = async (file: string, key: string): Promise<User[]> => {
-  const dateTime = getDateTimeStamp()
-  s.start()
-  s.message('Loading users and perparing to migrate')
-
-  const type = getFileType(createImportFilePath(file))
-
-  const transformerKeys = handlers.find(obj => obj.key === key);
+export const loadUsersFromFile = async (
+  file: string,
+  key: string,
+): Promise<User[]> => {
+  const dateTime = getDateTimeStamp();
+  s.start();
+  s.message("Loading users and preparing to migrate");
+
+  const type = getFileType(createImportFilePath(file));
+
+  const transformerKeys = handlers.find((obj) => obj.key === key);
 
   // convert a CSV to JSON and return array
   if (type === "text/csv") {
-
     const users: User[] = [];
     return new Promise((resolve, reject) => {
       fs.createReadStream(createImportFilePath(file))
         .pipe(csvParser())
-        .on('data', (data) => {
-          users.push(data)
-        })
-        .on('error', (err) => reject(err))
-        .on('end', () => {
-          resolve(users)
+        .on("data", (data) => {
+          users.push(data);
         })
+        .on("error", (err) => reject(err))
+        .on("end", () => {
+          resolve(users);
+        });
     });
 
-  // if the file is already JSON, just read and parse and return the result
+    // if the file is already JSON, just read and parse and return the result
   } else {
-    const users: User[] = JSON.parse(
-      fs.readFileSync(createImportFilePath(file), "utf-8")
+    const users: User[] = JSON.parse(
+      fs.readFileSync(createImportFilePath(file), "utf-8"),
     );
 
     const transformedData: User[] = [];
     // for (const user of users) {
     for (let i = 0; i < users.length; i++) {
-      const transformedUser = transformKeys(users[i], transformerKeys)
+      const transformedUser = transformKeys(users[i], transformerKeys);
 
-      const validationResult = userSchema.safeParse(transformedUser)
+      const validationResult = 
userSchema.safeParse(transformedUser); // Check if validation was successful if (validationResult.success) { // The data is valid according to the original schema const validatedData = validationResult.data; - transformedData.push(validatedData) + transformedData.push(validatedData); } else { // The data is not valid, handle errors - validationLogger({ error: `${validationResult.error.errors[0].code} for required field.`, path: validationResult.error.errors[0].path, row: i }, dateTime) + validationLogger( + { + error: `${validationResult.error.errors[0].code} for required field.`, + path: validationResult.error.errors[0].path, + row: i, + }, + dateTime, + ); } - i++ + i++; } - s.stop('Users Loaded') + s.stop("Users Loaded"); // p.log.step('Users loaded') - return transformedData + return transformedData; } -} - +}; // Make sure that Auth.js is the first option for the script export const authjsFirstSort = (a: any, b: any): number => { // If 'authjs' is present in either 'a' or 'b', prioritize it - if (a.key === 'authjs') return -1; - if (b.key === 'authjs') return 1; + if (a.key === "authjs") return -1; + if (b.key === "authjs") return 1; // Otherwise, maintain the original order return 0; }; - - diff --git a/src/handlers/authjsHandler.ts b/src/handlers/authjsHandler.ts index 645dd6f..77b85e0 100644 --- a/src/handlers/authjsHandler.ts +++ b/src/handlers/authjsHandler.ts @@ -1,12 +1,10 @@ export const options = { - key: 'authjs', - label: 'Authjs (Next-Auth)', + key: "authjs", + label: "Authjs (Next-Auth)", transformer: { id: "userId", email_addresses: "email", first_name: "firstName", - last_name: "lastName" - } - -} - + last_name: "lastName", + }, +}; diff --git a/src/handlers/clerkHandler.ts b/src/handlers/clerkHandler.ts index ef805f8..21b340f 100644 --- a/src/handlers/clerkHandler.ts +++ b/src/handlers/clerkHandler.ts @@ -1,6 +1,6 @@ export const options = { - key: 'clerk', - label: 'Clerk', + key: "clerk", + label: "Clerk", transformer: { id: "userId", email_addresses: "email", @@ -8,9 +8,6 @@ export const options = { last_name: "lastName", phone_number: "phoneNumber", password_digest: "passwordDigest", - password_hasher: "passwordHasher" - } -} - - - + password_hasher: "passwordHasher", + }, +}; diff --git a/src/handlers/supabaseHandler.ts b/src/handlers/supabaseHandler.ts index bbd1604..5f4214c 100644 --- a/src/handlers/supabaseHandler.ts +++ b/src/handlers/supabaseHandler.ts @@ -1,10 +1,10 @@ export const options = { - key: 'supabase', - label: 'Supabase', + key: "supabase", + label: "Supabase", transformer: { id: "userId", email_addresses: "email", first_name: "firstName", - last_name: "lastName" - } -} + last_name: "lastName", + }, +}; diff --git a/src/import-users.ts b/src/import-users.ts index 18d3958..e48f692 100644 --- a/src/import-users.ts +++ b/src/import-users.ts @@ -1,83 +1,82 @@ import clerkClient from "@clerk/clerk-sdk-node"; import { env } from "./envs-constants"; import { User, getDateTimeStamp, userSchema } from "./functions"; -import * as p from '@clack/prompts' +import * as p from "@clack/prompts"; import { errorLogger } from "./logger"; // TODO: This is likely not needed anymore type CliArgs = { - key: string, - file: string, - instance: string, - offest?: string, - begin: boolean -} + key: string; + file: string; + instance: string; + offest?: string; + begin: boolean; +}; -const s = p.spinner() -let migrated = 0 +const s = p.spinner(); +let migrated = 0; async function cooldown(ms: number) { await new Promise((r) => setTimeout(r, ms)); } - const createUser = 
(userData: User) => userData.password ? clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - passwordDigest: userData.password, - passwordHasher: userData.passwordHasher, - }) + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + passwordDigest: userData.password, + passwordHasher: userData.passwordHasher, + }) : clerkClient.users.createUser({ - externalId: userData.userId, - emailAddress: [userData.email], - firstName: userData.firstName, - lastName: userData.lastName, - skipPasswordRequirement: true, - }); - + externalId: userData.userId, + emailAddress: [userData.email], + firstName: userData.firstName, + lastName: userData.lastName, + skipPasswordRequirement: true, + }); - -async function processUserToClerk(userData: User, total: number, dateTime: string) { +async function processUserToClerk( + userData: User, + total: number, + dateTime: string, +) { try { const parsedUserData = userSchema.safeParse(userData); if (!parsedUserData.success) { throw parsedUserData.error; } await createUser(parsedUserData.data); - migrated++ - s.message(`Migrating users: [${migrated}/${total}]`) - + migrated++; + s.message(`Migrating users: [${migrated}/${total}]`); } catch (error) { // Keep cooldown in case rate limit is reached as a fallback if the thread blocking fails if (error.status === 429) { - await cooldown(env.RETRY_DELAY_MS) + await cooldown(env.RETRY_DELAY_MS); return processUserToClerk(userData, total, dateTime); } if (error.status === "form_identifier_exists") { - console.log('ERROR', error) - + console.log("ERROR", error); } - errorLogger({ userId: userData.userId, status: error.status, errors: error.errors }, dateTime); + errorLogger( + { userId: userData.userId, status: error.status, errors: error.errors }, + dateTime, + ); } } - - export const importUsers = async (users: User[]) => { - - const dateTime = getDateTimeStamp() - s.start() - const total = users.length - s.message(`Migration users: [0/${total}]`) + const dateTime = getDateTimeStamp(); + s.start(); + const total = users.length; + s.message(`Migration users: [0/${total}]`); for (const user of users) { - await cooldown(env.DELAY) - await processUserToClerk(user, total, dateTime) + await cooldown(env.DELAY); + await processUserToClerk(user, total, dateTime); } - s.stop() - p.outro('Migration complete') -} + s.stop(); + p.outro("Migration complete"); +}; diff --git a/src/logger.ts b/src/logger.ts index 432adb1..bce64bb 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -1,25 +1,25 @@ -import { ClerkAPIError } from '@clerk/types'; -import fs from 'fs'; -import path from 'path' +import { ClerkAPIError } from "@clerk/types"; +import fs from "fs"; +import path from "path"; type ErrorPayload = { userId: string; status: string; - errors: ClerkAPIError[] -} + errors: ClerkAPIError[]; +}; type ValidationErrorPayload = { error: string; path: (string | number)[]; row: number; -} +}; type ErrorLog = { type: string; userId: string; status: string; - error: string | undefined -} + error: string | undefined; +}; const confirmOrCreateFolder = (path: string) => { try { @@ -27,81 +27,70 @@ const confirmOrCreateFolder = (path: string) => { fs.mkdirSync(path); } } catch (err) { - console.error( - '❌ Error creating directory for logs:', - err - ); + console.error("❌ Error creating directory for logs:", err); } -} - +}; const logger = (payload: any, dateTime: string) => 
{ - const logPath = path.join(__dirname, '..', 'logs') - confirmOrCreateFolder(logPath) + const logPath = path.join(__dirname, "..", "logs"); + confirmOrCreateFolder(logPath); try { if (!fs.existsSync(`${logPath}/${dateTime}.json`)) { fs.writeFileSync( `${logPath}/${dateTime}.json`, - JSON.stringify(payload, null, 2) + JSON.stringify(payload, null, 2), ); } else { const log = JSON.parse( - fs.readFileSync(`${logPath}/${dateTime}.json`, "utf-8") + fs.readFileSync(`${logPath}/${dateTime}.json`, "utf-8"), ); - log.push(payload) + log.push(payload); fs.writeFileSync( `${logPath}/${dateTime}.json`, - JSON.stringify(log, null, 2) + JSON.stringify(log, null, 2), ); } - } catch (err) { - console.error( - '❌ Error creating directory for logs:', - err - ); + console.error("❌ Error creating directory for logs:", err); } -} - +}; export const infoLogger = (message: string, dateTime: string): void => { - confirmOrCreateFolder(path.join(__dirname, '..', 'logs')) - logger([{ message: message }], dateTime) -} + confirmOrCreateFolder(path.join(__dirname, "..", "logs")); + logger([{ message: message }], dateTime); +}; export const errorLogger = (payload: ErrorPayload, dateTime: string): void => { + const errorsPath = path.join(__dirname, "..", "logs"); + confirmOrCreateFolder(errorsPath); - const errorsPath = path.join(__dirname, '..', 'logs') - confirmOrCreateFolder(errorsPath) - - const errors: ErrorLog[] = [] + const errors: ErrorLog[] = []; for (const err of payload.errors) { - const errorToLog = { type: "User Creation Error", userId: payload.userId, status: payload.status, - error: err.longMessage - - } - errors.push((errorToLog)) + error: err.longMessage, + }; + errors.push(errorToLog); } - logger(errors, dateTime) -} - -export const validationLogger = (payload: ValidationErrorPayload, dateTime: string): void => { + logger(errors, dateTime); +}; - const errorsPath = path.join(__dirname, '..', 'logs') - confirmOrCreateFolder(errorsPath) +export const validationLogger = ( + payload: ValidationErrorPayload, + dateTime: string, +): void => { + const errorsPath = path.join(__dirname, "..", "logs"); + confirmOrCreateFolder(errorsPath); const error = { type: "Validation Error", row: payload.row, error: payload.error, - path: payload.path - - } - logger(error, dateTime) -} + path: payload.path, + }; + logger(error, dateTime); +}; diff --git a/src/spinner.ts b/src/spinner.ts index 4a466a0..f9b6b42 100644 --- a/src/spinner.ts +++ b/src/spinner.ts @@ -1,6 +1,6 @@ -import * as p from '@clack/prompts'; +import * as p from "@clack/prompts"; -p.intro('spinner start...'); +p.intro("spinner start..."); const spin = p.spinner(); const total = 10000; @@ -18,5 +18,5 @@ new Promise((resolve) => { }, 100); }).then(() => { spin.stop(`Done`); - p.outro('spinner stop...'); + p.outro("spinner stop..."); }); From 969c4a11ea817b76a7a615b8a6d05df7975a0123 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 22:09:57 -0500 Subject: [PATCH 17/31] (wip) Removed spinner that was added just for testing. 
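
The file only exercised the @clack/prompts progress pattern that
src/import-users.ts already uses for the real migration loop. For reference,
a minimal sketch of that pattern (assuming a `total` user count; only calls
that already appear in this repo):

    import * as p from "@clack/prompts";

    const s = p.spinner();
    s.start();
    for (let done = 1; done <= total; done++) {
      // update the spinner label in place instead of printing a line per user
      s.message(`Migrating users: [${done}/${total}]`);
    }
    s.stop();
    p.outro("Migration complete");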
---
 src/spinner.ts | 22 ----------------------
 1 file changed, 22 deletions(-)
 delete mode 100644 src/spinner.ts

diff --git a/src/spinner.ts b/src/spinner.ts
deleted file mode 100644
index f9b6b42..0000000
--- a/src/spinner.ts
+++ /dev/null
@@ -1,22 +0,0 @@
-import * as p from "@clack/prompts";
-
-p.intro("spinner start...");
-
-const spin = p.spinner();
-const total = 10000;
-let progress = 0;
-spin.start();
-
-new Promise((resolve) => {
-  const timer = setInterval(() => {
-    progress = Math.min(total, progress + 100);
-    if (progress >= total) {
-      clearInterval(timer);
-      resolve(true);
-    }
-    spin.message(`Loading packages [${progress}/${total}]`); // <===
-  }, 100);
-}).then(() => {
-  spin.stop(`Done`);
-  p.outro("spinner stop...");
-});

From 63fa0fd625a7e1260863d763fc72ea0647997580 Mon Sep 17 00:00:00 2001
From: Roy Anger 
Date: Sun, 3 Mar 2024 22:10:56 -0500
Subject: [PATCH 18/31] (wip) Added Supabase JSON sample

---
 samples/supabase.json | 36 ++++++++++++++++++++++++++++++++++++
 1 file changed, 36 insertions(+)
 create mode 100644 samples/supabase.json

diff --git a/samples/supabase.json b/samples/supabase.json
new file mode 100644
index 0000000..74f8ebc
--- /dev/null
+++ b/samples/supabase.json
@@ -0,0 +1,36 @@
+[
+{
+  "instance_id": "00000000-0000-0000-0000-000000000000",
+  "id": "2971a33d-5b7c-4c11-b8fe-61b7f185f211",
+  "aud": "authenticated",
+  "role": "authenticated",
+  "email": "janedoe@clerk.dev",
+  "encrypted_password": "$2a$10$hg4EXrEHfcqoKhNtENsYCO5anpp/C9WCUAAAtXEqpZkdCcxL/hcGG",
+  "email_confirmed_at": "2024-02-22 14:34:45.631743+00",
+  "raw_app_meta_data": "{\"provider\":\"email\",\"providers\":[\"email\"]}",
+  "raw_user_meta_data": "{}",
+  "created_at": "2024-02-22 14:34:45.626071+00",
+  "updated_at": "2024-02-22 14:34:45.631967+00",
+  "confirmed_at": "2024-02-22 14:34:45.631743+00",
+  "email_change_confirm_status": "0",
+  "is_sso_user": "false",
+  "deleted_at": ""
+  },
+{
+  "instance_id": "00000000-0000-0000-0000-000000000000",
+  "id": "2971a33d-5b7c-4c11-b8fe-61b7f185f234",
+  "aud": "authenticated",
+  "role": "authenticated",
+  "email": "johndoe@clerk.dev",
+  "encrypted_password": "$2a$10$hg4EXrEHfcqoKhNtENsYCO5anpp/C9WCUAAAtXEqpZkdCcxL/hcGG",
+  "email_confirmed_at": "2024-01-01 14:34:45.631743+00",
+  "raw_app_meta_data": "{\"provider\":\"email\",\"providers\":[\"email\"]}",
+  "raw_user_meta_data": "{}",
+  "created_at": "2024-02-22 14:34:45.626071+00",
+  "updated_at": "2024-02-22 14:34:45.631967+00",
+  "confirmed_at": "2024-02-22 14:34:45.631743+00",
+  "email_change_confirm_status": "0",
+  "is_sso_user": "false",
+  "deleted_at": ""
+  }
+]

From 6d5f6a4b593d0e503a8a381f352ea30b607c63b6 Mon Sep 17 00:00:00 2001
From: Roy Anger 
Date: Sun, 3 Mar 2024 23:30:53 -0500
Subject: [PATCH 19/31] (wip) Added transform/validation to JSON and CSV

---
 src/functions.ts | 72 +++++++++++++++++++++++++++++------------------
 1 file changed, 43 insertions(+), 29 deletions(-)

diff --git a/src/functions.ts b/src/functions.ts
index 83a73ab..ade3c3e 100644
--- a/src/functions.ts
+++ b/src/functions.ts
@@ -123,6 +123,42 @@ export const transformKeys = (
   return transformedData;
 };
 
+const transformUsers = (users: User[], key: string, dateTime: string) => {
+
+  const transformerKeys = handlers.find((obj) => obj.key === key);
+
+  // TODO: This block of code trims the users array from 2500 to 1250.
+  // This applies to smaller numbers. Pass in 10, get 5 back. 
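+  // Cause: the loop body below ends with a second `i++` while the for-header
+  // already increments i, so every other user is skipped and the count halves.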
+  const transformedData: User[] = [];
+  console.log('USERS BEFORE', users.length)
+  for (let i = 0; i < users.length; i++) {
+    const transformedUser = transformKeys(users[i], transformerKeys);
+
+    const validationResult = userSchema.safeParse(transformedUser);
+
+    // Check if validation was successful
+    if (validationResult.success) {
+      // The data is valid according to the original schema
+      const validatedData = validationResult.data;
+      transformedData.push(validatedData);
+    } else {
+      // The data is not valid, handle errors
+      validationLogger(
+        {
+          error: `${validationResult.error.errors[0].code} for required field.`,
+          path: validationResult.error.errors[0].path,
+          row: i,
+        },
+        dateTime,
+      );
+    }
+    i++;
+  }
+
+  console.log('USERS USERS', transformedData.length)
+  return transformedData
+}
+
 export const loadUsersFromFile = async (
   file: string,
   key: string,
@@ -133,7 +169,6 @@
   const type = getFileType(createImportFilePath(file));

-  const transformerKeys = handlers.find((obj) => obj.key === key);

   // convert a CSV to JSON and return array
   if (type === "text/csv") {
@@ -146,7 +181,8 @@
       })
       .on("error", (err) => reject(err))
       .on("end", () => {
-        resolve(users);
+        const transformedData: User[] = transformUsers(users, key, dateTime)
+        resolve(transformedData);
       });
     });

@@ -154,33 +190,11 @@
   } else {
     const users: User[] = JSON.parse(
       fs.readFileSync(createImportFilePath(file), "utf-8"),
-    );
-
-    const transformedData: User[] = [];
-    // for (const user of users) {
-    for (let i = 0; i < users.length; i++) {
-      const transformedUser = transformKeys(users[i], transformerKeys);
-
-      const validationResult = userSchema.safeParse(transformedUser);
-
-      // Check if validation was successful
-      if (validationResult.success) {
-        // The data is valid according to the original schema
-        const validatedData = validationResult.data;
-        transformedData.push(validatedData);
-      } else {
-        // The data is not valid, handle errors
-        validationLogger(
-          {
-            error: `${validationResult.error.errors[0].code} for required field.`,
-            path: validationResult.error.errors[0].path,
-            row: i,
-          },
-          dateTime,
-        );
-      }
-      i++;
-    }
+    )
+
+    const transformedData: User[] = transformUsers(users, key, dateTime)
+
+    console.log('USERS USERS', transformedData.length)
     s.stop("Users Loaded");
     // p.log.step('Users loaded')
     return transformedData;

From 242d0e7069e4958a5818309af34340f4ddb83354 Mon Sep 17 00:00:00 2001
From: Roy Anger
Date: Sun, 3 Mar 2024 23:51:41 -0500
Subject: [PATCH 20/31] Removed need for different handler files, combined into one.
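The idea, in sketch form (names below are illustrative, not the final handlers.ts): one array entry per platform supplies both the CLI select option (value/label) and the key-renaming map (transformer), so adding a platform becomes a single entry rather than a new handler file.

type Handler = {
  key: string;
  value: string;
  label: string;
  transformer: Record<string, string>;
};

const exampleHandlers: Handler[] = [
  {
    key: "authjs",
    value: "authjs",
    label: "Authjs (Next-Auth)",
    transformer: { id: "userId", email_addresses: "email" },
  },
];

// Rename incoming keys according to a handler's transformer map;
// keys without a mapping pass through unchanged.
const renameKeys = (
  row: Record<string, unknown>,
  map: Record<string, string>,
) => Object.fromEntries(Object.entries(row).map(([k, v]) => [map[k] ?? k, v]));

// renameKeys({ id: "1", email_addresses: "a@b.co" }, exampleHandlers[0].transformer)
// => { userId: "1", email: "a@b.co" }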
--- src/cli.ts | 8 +++--- src/functions.ts | 42 +-------------------------- src/handlers.ts | 50 +++++++++++++++++++++++++++++++++ src/handlers/authjsHandler.ts | 10 ------- src/handlers/clerkHandler.ts | 13 --------- src/handlers/supabaseHandler.ts | 10 ------- 6 files changed, 55 insertions(+), 78 deletions(-) create mode 100644 src/handlers.ts delete mode 100644 src/handlers/authjsHandler.ts delete mode 100644 src/handlers/clerkHandler.ts delete mode 100644 src/handlers/supabaseHandler.ts diff --git a/src/cli.ts b/src/cli.ts index f0bf1be..7e2f632 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -2,25 +2,25 @@ import * as p from "@clack/prompts"; import color from "picocolors"; import { checkIfFileExists, - createHandlerOptions, getDateTimeStamp, getFileType, + } from "./functions"; import { infoLogger } from "./logger"; +import { handlers } from "./handlers"; export const runCLI = async () => { p.intro(`${color.bgCyan(color.black("Clerk User Migration Utility"))}`); - const options = createHandlerOptions(); const args = await p.group( { key: () => p.select({ message: "What platform are you migrating your users from?", - initialValue: options[0].value, + initialValue: handlers[0].value, maxItems: 1, - options: options, + options: handlers, }), file: () => p.text({ diff --git a/src/functions.ts b/src/functions.ts index ade3c3e..c153696 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -5,39 +5,10 @@ import csvParser from "csv-parser"; import * as z from "zod"; import * as p from "@clack/prompts"; import { validationLogger } from "./logger"; +import { handlers } from "./handlers"; const s = p.spinner(); -type Handler = { - key: string; - label: string; - transformer: any; -}; - -// Dynamically read what handlers are present and generate array for use in script -const handlersDirectory = path.join(__dirname, "/handlers"); -export const handlers: Handler[] = []; -const files = fs.readdirSync(handlersDirectory); - -files.forEach((file) => { - if (file.endsWith(".ts")) { - const filePath = path.join(handlersDirectory, file); - const handlerModule = require(filePath); - - if ( - handlerModule.options && - handlerModule.options.key && - handlerModule.options.transformer - ) { - handlers.push({ - key: handlerModule.options.key, - label: handlerModule.options.label || "", - transformer: handlerModule.options.transformer, - }); - } - } -}); - // default schema -- incoming data will be transformed to this format export const userSchema = z.object({ userId: z.string(), @@ -91,17 +62,6 @@ export type OptionType = { hint?: string | undefined; }; -// handlers is an array created from the files in /src/validators -// generate an array of options for use in the CLI -export const createHandlerOptions = () => { - const options: OptionType[] = []; - - for (const handler of handlers) { - options.push({ value: handler.key, label: handler.label }); - } - return options; -}; - // transform incoming data datas to match default schema // TODO : Remove any -- not sure how to handle this export const transformKeys = ( diff --git a/src/handlers.ts b/src/handlers.ts new file mode 100644 index 0000000..208d2dc --- /dev/null +++ b/src/handlers.ts @@ -0,0 +1,50 @@ +export const handlers = [ + { + key: "clerk", + value: "clerk", + label: "Clerk", + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName", + phone_number: "phoneNumber", + password_digest: "passwordDigest", + password_hasher: "passwordHasher", + }, + }, + { + key: "authjs", + value: "authjs", + label: 
"Authjs (Next-Auth)", + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName", + }, + }, + { + key: "supabase", + value: "supabase", + label: "Supabase", + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName", + }, + }, + { + key: "auth0", + value: "auth0", + label: "Auth0", + transformer: { + id: "userId", + email_addresses: "email", + first_name: "firstName", + last_name: "lastName", + } + } +] + diff --git a/src/handlers/authjsHandler.ts b/src/handlers/authjsHandler.ts deleted file mode 100644 index 77b85e0..0000000 --- a/src/handlers/authjsHandler.ts +++ /dev/null @@ -1,10 +0,0 @@ -export const options = { - key: "authjs", - label: "Authjs (Next-Auth)", - transformer: { - id: "userId", - email_addresses: "email", - first_name: "firstName", - last_name: "lastName", - }, -}; diff --git a/src/handlers/clerkHandler.ts b/src/handlers/clerkHandler.ts deleted file mode 100644 index 21b340f..0000000 --- a/src/handlers/clerkHandler.ts +++ /dev/null @@ -1,13 +0,0 @@ -export const options = { - key: "clerk", - label: "Clerk", - transformer: { - id: "userId", - email_addresses: "email", - first_name: "firstName", - last_name: "lastName", - phone_number: "phoneNumber", - password_digest: "passwordDigest", - password_hasher: "passwordHasher", - }, -}; diff --git a/src/handlers/supabaseHandler.ts b/src/handlers/supabaseHandler.ts deleted file mode 100644 index 5f4214c..0000000 --- a/src/handlers/supabaseHandler.ts +++ /dev/null @@ -1,10 +0,0 @@ -export const options = { - key: "supabase", - label: "Supabase", - transformer: { - id: "userId", - email_addresses: "email", - first_name: "firstName", - last_name: "lastName", - }, -}; From 4af9dab96bc81b64604f42558d2d3d73e7b9b860 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Sun, 3 Mar 2024 23:53:40 -0500 Subject: [PATCH 21/31] Formatting --- src/cli.ts | 8 +------- src/functions.ts | 20 ++++++++------------ src/handlers.ts | 7 +++---- 3 files changed, 12 insertions(+), 23 deletions(-) diff --git a/src/cli.ts b/src/cli.ts index 7e2f632..ed0194b 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,18 +1,12 @@ import * as p from "@clack/prompts"; import color from "picocolors"; -import { - checkIfFileExists, - getDateTimeStamp, - getFileType, - -} from "./functions"; +import { checkIfFileExists, getDateTimeStamp, getFileType } from "./functions"; import { infoLogger } from "./logger"; import { handlers } from "./handlers"; export const runCLI = async () => { p.intro(`${color.bgCyan(color.black("Clerk User Migration Utility"))}`); - const args = await p.group( { key: () => diff --git a/src/functions.ts b/src/functions.ts index c153696..6ffe553 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -84,13 +84,12 @@ export const transformKeys = ( }; const transformUsers = (users: User[], key: string, dateTime: string) => { - const transformerKeys = handlers.find((obj) => obj.key === key); - // TODO: This block of code trims the users array from 2500 to 12500. + // TODO: This block of code trims the users array from 2500 to 12500. // This applies to smaller numbers. Pass in 10, get 5 back. 
const transformedData: User[] = []; - console.log('USERS BEFORE', users.length) + console.log("USERS BEFORE", users.length); for (let i = 0; i < users.length; i++) { const transformedUser = transformKeys(users[i], transformerKeys); @@ -115,9 +114,9 @@ const transformUsers = (users: User[], key: string, dateTime: string) => { i++; } - console.log('USERS USERS', transformedData.length) - return transformedData -} + console.log("USERS AFTER", transformedData.length); + return transformedData; +}; export const loadUsersFromFile = async ( file: string, @@ -129,7 +128,6 @@ export const loadUsersFromFile = async ( const type = getFileType(createImportFilePath(file)); - // convert a CSV to JSON and return array if (type === "text/csv") { const users: User[] = []; @@ -141,7 +139,7 @@ export const loadUsersFromFile = async ( }) .on("error", (err) => reject(err)) .on("end", () => { - const transformedData: User[] = transformUsers(users, key, dateTime) + const transformedData: User[] = transformUsers(users, key, dateTime); resolve(transformedData); }); }); @@ -150,11 +148,9 @@ export const loadUsersFromFile = async ( } else { const users: User[] = JSON.parse( fs.readFileSync(createImportFilePath(file), "utf-8"), - ) - - const transformedData: User[] = transformUsers(users, key, dateTime) + ); - console.log('USERS USERS', transformedData.length) + const transformedData: User[] = transformUsers(users, key, dateTime); s.stop("Users Loaded"); // p.log.step('Users loaded') return transformedData; diff --git a/src/handlers.ts b/src/handlers.ts index 208d2dc..72d2c81 100644 --- a/src/handlers.ts +++ b/src/handlers.ts @@ -44,7 +44,6 @@ export const handlers = [ email_addresses: "email", first_name: "firstName", last_name: "lastName", - } - } -] - + }, + }, +]; From c46c5e3a421ca4600462827bc0359f31e06781a9 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Mon, 4 Mar 2024 16:10:23 -0500 Subject: [PATCH 22/31] Bug fixes and minor updates --- src/cli.ts | 7 +------ src/functions.ts | 11 +++-------- src/import-users.ts | 8 ++++---- src/logger.ts | 3 ++- 4 files changed, 10 insertions(+), 19 deletions(-) diff --git a/src/cli.ts b/src/cli.ts index ed0194b..4bbeb78 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -1,7 +1,6 @@ import * as p from "@clack/prompts"; import color from "picocolors"; -import { checkIfFileExists, getDateTimeStamp, getFileType } from "./functions"; -import { infoLogger } from "./logger"; +import { checkIfFileExists, getFileType } from "./functions"; import { handlers } from "./handlers"; export const runCLI = async () => { @@ -64,9 +63,5 @@ export const runCLI = async () => { }, ); - if (args.begin) { - infoLogger("Migration process started", getDateTimeStamp()); - } - return args; }; diff --git a/src/functions.ts b/src/functions.ts index 6ffe553..90fc121 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -65,9 +65,9 @@ export type OptionType = { // transform incoming data datas to match default schema // TODO : Remove any -- not sure how to handle this export const transformKeys = ( - data: Record, + data: Record, keys: any, -): Record => { +): Record => { const transformedData: Record = {}; // for (const key in data) { for (const [key, value] of Object.entries(data)) { @@ -86,15 +86,12 @@ export const transformKeys = ( const transformUsers = (users: User[], key: string, dateTime: string) => { const transformerKeys = handlers.find((obj) => obj.key === key); - // TODO: This block of code trims the users array from 2500 to 12500. // This applies to smaller numbers. Pass in 10, get 5 back. 
const transformedData: User[] = []; - console.log("USERS BEFORE", users.length); for (let i = 0; i < users.length; i++) { const transformedUser = transformKeys(users[i], transformerKeys); const validationResult = userSchema.safeParse(transformedUser); - // Check if validation was successful if (validationResult.success) { // The data is valid according to the original schema @@ -111,10 +108,7 @@ const transformUsers = (users: User[], key: string, dateTime: string) => { dateTime, ); } - i++; } - - console.log("USERS AFTER", transformedData.length); return transformedData; }; @@ -158,6 +152,7 @@ export const loadUsersFromFile = async ( }; // Make sure that Auth.js is the first option for the script +// TODO: Is this needed? export const authjsFirstSort = (a: any, b: any): number => { // If 'authjs' is present in either 'a' or 'b', prioritize it if (a.key === "authjs") return -1; diff --git a/src/import-users.ts b/src/import-users.ts index e48f692..cace4aa 100644 --- a/src/import-users.ts +++ b/src/import-users.ts @@ -57,9 +57,9 @@ async function processUserToClerk( await cooldown(env.RETRY_DELAY_MS); return processUserToClerk(userData, total, dateTime); } - if (error.status === "form_identifier_exists") { - console.log("ERROR", error); - } + // if (error.status === "form_identifier_exists") { + // console.log("ERROR", error); + // } errorLogger( { userId: userData.userId, status: error.status, errors: error.errors }, dateTime, @@ -74,8 +74,8 @@ export const importUsers = async (users: User[]) => { s.message(`Migration users: [0/${total}]`); for (const user of users) { - await cooldown(env.DELAY); await processUserToClerk(user, total, dateTime); + await cooldown(env.DELAY); } s.stop(); p.outro("Migration complete"); diff --git a/src/logger.ts b/src/logger.ts index bce64bb..095c01b 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -37,9 +37,10 @@ const logger = (payload: any, dateTime: string) => { try { if (!fs.existsSync(`${logPath}/${dateTime}.json`)) { + const log = [payload]; fs.writeFileSync( `${logPath}/${dateTime}.json`, - JSON.stringify(payload, null, 2), + JSON.stringify(log, null, 2), ); } else { const log = JSON.parse( From cb11d762a9a367db546f556b40cff061055c5bc5 Mon Sep 17 00:00:00 2001 From: Jacob MG Evans Date: Mon, 4 Mar 2024 13:41:36 -0800 Subject: [PATCH 23/31] Fixed types with any on transform functions and used a newer .hasOwnProperty API --- index.ts | 2 +- src/functions.ts | 44 +++++++++++++++++++++----------------------- 2 files changed, 22 insertions(+), 24 deletions(-) diff --git a/index.ts b/index.ts index bfaa90b..91615d2 100755 --- a/index.ts +++ b/index.ts @@ -11,7 +11,7 @@ if ( env.IMPORT_TO_DEV === false ) { throw new Error( - "The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. If you want to import users to your development instance, please set 'IMPORT_TO_DEV_INSTANCE' in your .env to 'true'.", + "The Clerk Secret Key provided is for a development instance. Development instances are limited to 500 users and do not share their userbase with production instances. 
If you want to import users to your development instance, please set 'IMPORT_TO_DEV' in your .env to 'true'.", ); } diff --git a/src/functions.ts b/src/functions.ts index 90fc121..f17965b 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -62,17 +62,18 @@ export type OptionType = { hint?: string | undefined; }; +// create a union of all transformer objects in handlers array +type KeyHandlerMap = (typeof handlers)[number]; + // transform incoming data datas to match default schema -// TODO : Remove any -- not sure how to handle this -export const transformKeys = ( +export function transformKeys( data: Record, - keys: any, -): Record => { - const transformedData: Record = {}; - // for (const key in data) { + keys: T, +): Record { + const transformedData = {}; for (const [key, value] of Object.entries(data)) { if (value !== "" && value !== '"{}"') { - if (data.hasOwnProperty(key)) { + if (Object.prototype.hasOwnProperty.call(data, key)) { let transformedKey = key; if (keys.transformer[key]) transformedKey = keys.transformer[key]; @@ -81,14 +82,22 @@ export const transformKeys = ( } } return transformedData; -}; - -const transformUsers = (users: User[], key: string, dateTime: string) => { - const transformerKeys = handlers.find((obj) => obj.key === key); +} +const transformUsers = ( + users: User[], + key: keyof (typeof handlers)[number], + dateTime: string, +) => { // This applies to smaller numbers. Pass in 10, get 5 back. const transformedData: User[] = []; for (let i = 0; i < users.length; i++) { + const transformerKeys = handlers.find((obj) => obj.key === key); + + if (transformerKeys === undefined) { + throw new Error("No transformer found for the specified key"); + } + const transformedUser = transformKeys(users[i], transformerKeys); const validationResult = userSchema.safeParse(transformedUser); @@ -114,7 +123,7 @@ const transformUsers = (users: User[], key: string, dateTime: string) => { export const loadUsersFromFile = async ( file: string, - key: string, + key: keyof (typeof handlers)[number], ): Promise => { const dateTime = getDateTimeStamp(); s.start(); @@ -150,14 +159,3 @@ export const loadUsersFromFile = async ( return transformedData; } }; - -// Make sure that Auth.js is the first option for the script -// TODO: Is this needed? 
-export const authjsFirstSort = (a: any, b: any): number => { - // If 'authjs' is present in either 'a' or 'b', prioritize it - if (a.key === "authjs") return -1; - if (b.key === "authjs") return 1; - - // Otherwise, maintain the original order - return 0; -}; From dea3101900ee3d801b40550bfd7d3ad674434717 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Mon, 4 Mar 2024 16:43:48 -0500 Subject: [PATCH 24/31] Updated Supabase handler, added sample, add code to add default field --- samples/supabase.csv | 3 +++ src/functions.ts | 27 ++++++++++++++++++++++++--- src/handlers.ts | 7 ++++++- 3 files changed, 33 insertions(+), 4 deletions(-) create mode 100644 samples/supabase.csv diff --git a/samples/supabase.csv b/samples/supabase.csv new file mode 100644 index 0000000..d4436c2 --- /dev/null +++ b/samples/supabase.csv @@ -0,0 +1,3 @@ +"instance_id","id","aud","role","email","encrypted_password","email_confirmed_at","invited_at","confirmation_token","confirmation_sent_at","recovery_token","recovery_sent_at","email_change_token_new","email_change","email_change_sent_at","last_sign_in_at","raw_app_meta_data","raw_user_meta_data","is_super_admin","created_at","updated_at","phone","phone_confirmed_at","phone_change","phone_change_token","phone_change_sent_at","confirmed_at","email_change_token_current","email_change_confirm_status","banned_until","reauthentication_token","reauthentication_sent_at","is_sso_user","deleted_at" +"00000000-0000-0000-0000-000000000000","76b196c8-d5c4-4907-9746-ed06ef829a67","authenticated","authenticated","test@test.com","$2a$10$9zQjO8IH4gX/jBn2j8WvquwtBrj8tK7t6FdGsx9nb7e8HzILjxl1m","2024-02-26 14:04:29.153624+00","","","","","","","","","","{""provider"":""email"",""providers"":[""email""]}","{}","","2024-02-26 14:04:29.140992+00","2024-02-26 14:04:29.154469+00","","","","","","2024-02-26 14:04:29.153624+00","","0","","","","false","" +"00000000-0000-0000-0000-000000000000","926f3b49-9687-4d05-8557-2673387a1f3c","authenticated","authenticated","test2@test2.com","$2a$10$4n9B5uDN1pV0m7xUAzRnsuZkEBnGBTQF7kr7u8/tmTMBDOZM2.yBy","2024-03-04 12:12:24.9778+00","","","","","","","","","","{""provider"":""email"",""providers"":[""email""]}","{}","","2024-03-04 12:12:24.968657+00","2024-03-04 12:12:24.978022+00","","","","","","2024-03-04 12:12:24.9778+00","","0","","","","false","" \ No newline at end of file diff --git a/src/functions.ts b/src/functions.ts index 90fc121..3f54490 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -112,6 +112,26 @@ const transformUsers = (users: User[], key: string, dateTime: string) => { return transformedData; }; +const addDefaultFields = (users: User[], key: string) => { + if (handlers.find((obj) => obj.key === key)?.defaults) { + const defaultFields = handlers.find((obj) => obj.key === key)?.defaults; + + console.log('defaults', defaultFields) + + const updatedUsers: User[] = [] + + for (const user of users) { + const updated = { ...user, ...defaultFields } + updatedUsers.push(updated) + } + + console.log('USERS', JSON.stringify(updatedUsers, null, 2)) + return updatedUsers + } else { + return users + } +} + export const loadUsersFromFile = async ( file: string, key: string, @@ -133,7 +153,8 @@ export const loadUsersFromFile = async ( }) .on("error", (err) => reject(err)) .on("end", () => { - const transformedData: User[] = transformUsers(users, key, dateTime); + const usersWithDefaultFields = addDefaultFields(users, key) + const transformedData: User[] = transformUsers(usersWithDefaultFields, key, dateTime); resolve(transformedData); }); }); 
@@ -143,8 +164,8 @@ export const loadUsersFromFile = async ( const users: User[] = JSON.parse( fs.readFileSync(createImportFilePath(file), "utf-8"), ); - - const transformedData: User[] = transformUsers(users, key, dateTime); + const usersWithDefaultFields = addDefaultFields(users, key) + const transformedData: User[] = transformUsers(usersWithDefaultFields, key, dateTime); s.stop("Users Loaded"); // p.log.step('Users loaded') return transformedData; diff --git a/src/handlers.ts b/src/handlers.ts index 72d2c81..1950238 100644 --- a/src/handlers.ts +++ b/src/handlers.ts @@ -30,10 +30,15 @@ export const handlers = [ label: "Supabase", transformer: { id: "userId", - email_addresses: "email", + email: "email", first_name: "firstName", last_name: "lastName", + encrypted_password: "password", + phone: "phone" }, + defaults: { + passwordHasher: "bcrypt" + } }, { key: "auth0", From 10fb7d94966bcecadfca3c712fabfe567a33034b Mon Sep 17 00:00:00 2001 From: Jacob MG Evans Date: Mon, 4 Mar 2024 14:38:01 -0800 Subject: [PATCH 25/31] improved Logger type and fixed updated type handling the undefined with an empty object fallback --- src/functions.ts | 38 +++++++++++++++++++++++++------------- src/logger.ts | 12 +++++++----- 2 files changed, 32 insertions(+), 18 deletions(-) diff --git a/src/functions.ts b/src/functions.ts index cd30166..6244a3d 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -123,23 +123,27 @@ const transformUsers = ( const addDefaultFields = (users: User[], key: string) => { if (handlers.find((obj) => obj.key === key)?.defaults) { - const defaultFields = handlers.find((obj) => obj.key === key)?.defaults; + const defaultFields = + handlers.find((obj) => obj.key === key)?.defaults ?? {}; - console.log('defaults', defaultFields) + console.log("defaults", defaultFields); - const updatedUsers: User[] = [] + const updatedUsers: User[] = []; for (const user of users) { - const updated = { ...user, ...defaultFields } - updatedUsers.push(updated) + const updated = { + ...user, + ...defaultFields, + }; + updatedUsers.push(updated); } - console.log('USERS', JSON.stringify(updatedUsers, null, 2)) - return updatedUsers + console.log("USERS", JSON.stringify(updatedUsers, null, 2)); + return updatedUsers; } else { - return users + return users; } -} +}; export const loadUsersFromFile = async ( file: string, @@ -162,8 +166,12 @@ export const loadUsersFromFile = async ( }) .on("error", (err) => reject(err)) .on("end", () => { - const usersWithDefaultFields = addDefaultFields(users, key) - const transformedData: User[] = transformUsers(usersWithDefaultFields, key, dateTime); + const usersWithDefaultFields = addDefaultFields(users, key); + const transformedData: User[] = transformUsers( + usersWithDefaultFields, + key, + dateTime, + ); resolve(transformedData); }); }); @@ -173,8 +181,12 @@ export const loadUsersFromFile = async ( const users: User[] = JSON.parse( fs.readFileSync(createImportFilePath(file), "utf-8"), ); - const usersWithDefaultFields = addDefaultFields(users, key) - const transformedData: User[] = transformUsers(usersWithDefaultFields, key, dateTime); + const usersWithDefaultFields = addDefaultFields(users, key); + const transformedData: User[] = transformUsers( + usersWithDefaultFields, + key, + dateTime, + ); s.stop("Users Loaded"); // p.log.step('Users loaded') return transformedData; diff --git a/src/logger.ts b/src/logger.ts index 095c01b..9fed588 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -21,6 +21,8 @@ type ErrorLog = { error: string | undefined; }; +type LogType = 
ErrorLog[] | ValidationErrorPayload | { message: string }[]; + const confirmOrCreateFolder = (path: string) => { try { if (!fs.existsSync(path)) { @@ -31,7 +33,7 @@ const confirmOrCreateFolder = (path: string) => { } }; -const logger = (payload: any, dateTime: string) => { +function logger(payload: T, dateTime: string) { const logPath = path.join(__dirname, "..", "logs"); confirmOrCreateFolder(logPath); @@ -56,14 +58,14 @@ const logger = (payload: any, dateTime: string) => { } catch (err) { console.error("❌ Error creating directory for logs:", err); } -}; +} -export const infoLogger = (message: string, dateTime: string): void => { +export const infoLogger = (message: string, dateTime: string) => { confirmOrCreateFolder(path.join(__dirname, "..", "logs")); logger([{ message: message }], dateTime); }; -export const errorLogger = (payload: ErrorPayload, dateTime: string): void => { +export const errorLogger = (payload: ErrorPayload, dateTime: string) => { const errorsPath = path.join(__dirname, "..", "logs"); confirmOrCreateFolder(errorsPath); @@ -83,7 +85,7 @@ export const errorLogger = (payload: ErrorPayload, dateTime: string): void => { export const validationLogger = ( payload: ValidationErrorPayload, dateTime: string, -): void => { +) => { const errorsPath = path.join(__dirname, "..", "logs"); confirmOrCreateFolder(errorsPath); From 1852a79a5bc4e04369aa6788bb4673d5d5892002 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Mon, 4 Mar 2024 17:42:00 -0500 Subject: [PATCH 26/31] Apply suggestions from code review Co-authored-by: Jacob M-G Evans <27247160+JacobMGEvans@users.noreply.github.com> --- src/cli.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/cli.ts b/src/cli.ts index 4bbeb78..3114dce 100644 --- a/src/cli.ts +++ b/src/cli.ts @@ -35,12 +35,12 @@ export const runCLI = async () => { instance: () => p.select({ message: - "Are you importing your users into a production instance? Development instances are for testing and limited t0 500 users.", + "Are you importing your users into a production instance? Development instances are for testing and limited to 500 users.", initialValue: "prod", maxItems: 1, options: [ - { value: "prod", label: "Prodction" }, - { value: "dev", label: "Developetion" }, + { value: "prod", label: "Production" }, + { value: "dev", label: "Development" }, ], }), offset: () => From 08ab75c2b44576b00afbb3be4395730d2500b96b Mon Sep 17 00:00:00 2001 From: Jacob MG Evans Date: Mon, 4 Mar 2024 14:52:05 -0800 Subject: [PATCH 27/31] Type passed loadUsersFromFile needs validation, handling with cast for now --- index.ts | 5 +++-- src/functions.ts | 27 +++++++++++++++------------ 2 files changed, 18 insertions(+), 14 deletions(-) diff --git a/index.ts b/index.ts index 91615d2..112da5c 100755 --- a/index.ts +++ b/index.ts @@ -3,7 +3,7 @@ config(); import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; -import { loadUsersFromFile } from "./src/functions"; +import { TransformKeys, loadUsersFromFile } from "./src/functions"; import { importUsers } from "./src/import-users"; if ( @@ -18,7 +18,8 @@ if ( async function main() { const args = await runCLI(); - const users = await loadUsersFromFile(args.file, args.key); + // we can use Zod to validate the args.keys to ensure it is TransformKeys type + const users = await loadUsersFromFile(args.file, args.key as TransformKeys); const usersToImport = users.slice( parseInt(args.offset) > env.OFFSET ? 
parseInt(args.offset) : env.OFFSET, diff --git a/src/functions.ts b/src/functions.ts index 6244a3d..6f19b03 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -32,6 +32,19 @@ export const userSchema = z.object({ export type User = z.infer; +// emulate what Clack CLI expects for an option in a Select / MultiSelect +export type OptionType = { + value: string; + label: string | undefined; + hint?: string | undefined; +}; + +// create a union of all keys in the transformer object +export type TransformKeys = keyof (typeof handlers)[number]; + +// create a union of all transformer objects in handlers array +type KeyHandlerMap = (typeof handlers)[number]; + // utility function to create file path const createImportFilePath = (file: string) => { return path.join(__dirname, "..", file); @@ -55,16 +68,6 @@ export const getDateTimeStamp = () => { return new Date().toISOString().split(".")[0]; // YYYY-MM-DDTHH:mm:ss }; -// emulate what Clack CLI expects for an option in a Select / MultiSelect -export type OptionType = { - value: string; - label: string | undefined; - hint?: string | undefined; -}; - -// create a union of all transformer objects in handlers array -type KeyHandlerMap = (typeof handlers)[number]; - // transform incoming data datas to match default schema export function transformKeys( data: Record, @@ -86,7 +89,7 @@ export function transformKeys( const transformUsers = ( users: User[], - key: keyof (typeof handlers)[number], + key: TransformKeys, dateTime: string, ) => { // This applies to smaller numbers. Pass in 10, get 5 back. @@ -147,7 +150,7 @@ const addDefaultFields = (users: User[], key: string) => { export const loadUsersFromFile = async ( file: string, - key: keyof (typeof handlers)[number], + key: TransformKeys, ): Promise => { const dateTime = getDateTimeStamp(); s.start(); From 64b096e58ebb85a63e3b2eab7e38e1e5341daff6 Mon Sep 17 00:00:00 2001 From: Roy Anger Date: Mon, 4 Mar 2024 18:22:19 -0500 Subject: [PATCH 28/31] Updated Auth0 map --- package.json | 4 ++-- src/functions.ts | 1 + src/handlers.ts | 18 ++++++++++++------ 3 files changed, 15 insertions(+), 8 deletions(-) diff --git a/package.json b/package.json index 80ce8be..bf75d92 100644 --- a/package.json +++ b/package.json @@ -10,8 +10,8 @@ "start": "bun index.ts", "lint": "eslint . --config .eslintrc.js", "lint:fix": "eslint . --fix --config .eslintrc.js", - "prettier": "prettier . --write", - "prettier:test": "prettier ." + "format": "prettier . --write", + "format:test": "prettier ." 
}, "dependencies": { "@clack/prompts": "^0.7.0", diff --git a/src/functions.ts b/src/functions.ts index 6f19b03..97c49a4 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -13,6 +13,7 @@ const s = p.spinner(); export const userSchema = z.object({ userId: z.string(), email: z.string().email(), + username: z.string().optional(), firstName: z.string().optional(), lastName: z.string().optional(), password: z.string().optional(), diff --git a/src/handlers.ts b/src/handlers.ts index 1950238..03b2408 100644 --- a/src/handlers.ts +++ b/src/handlers.ts @@ -34,11 +34,11 @@ export const handlers = [ first_name: "firstName", last_name: "lastName", encrypted_password: "password", - phone: "phone" + phone: "phone", }, defaults: { - passwordHasher: "bcrypt" - } + passwordHasher: "bcrypt", + }, }, { key: "auth0", @@ -46,9 +46,15 @@ export const handlers = [ label: "Auth0", transformer: { id: "userId", - email_addresses: "email", - first_name: "firstName", - last_name: "lastName", + email: "email", + given_name: "firstName", + family_name: "lastName", + phone_number: "phone", + passwordHash: "password", + user_metadata: "publicMetadata", + }, + defaults: { + passwordHasher: "bcrypt", }, }, ]; From 802f34cfda0ce46eb14b74f747fd15a6dfc94657 Mon Sep 17 00:00:00 2001 From: Jacob MG Evans Date: Mon, 4 Mar 2024 15:53:30 -0800 Subject: [PATCH 29/31] handle merge conflict --- index.ts | 4 ++-- package.json | 6 ++++-- src/functions.test.ts | 36 ++++++++++++++++++++++++++++++++++++ src/functions.ts | 13 ++++++------- vitest.config.ts | 3 +++ 5 files changed, 51 insertions(+), 11 deletions(-) create mode 100644 src/functions.test.ts create mode 100644 vitest.config.ts diff --git a/index.ts b/index.ts index 112da5c..2222b6c 100755 --- a/index.ts +++ b/index.ts @@ -3,7 +3,7 @@ config(); import { env } from "./src/envs-constants"; import { runCLI } from "./src/cli"; -import { TransformKeys, loadUsersFromFile } from "./src/functions"; +import { loadUsersFromFile } from "./src/functions"; import { importUsers } from "./src/import-users"; if ( @@ -19,7 +19,7 @@ async function main() { const args = await runCLI(); // we can use Zod to validate the args.keys to ensure it is TransformKeys type - const users = await loadUsersFromFile(args.file, args.key as TransformKeys); + const users = await loadUsersFromFile(args.file, args.key); const usersToImport = users.slice( parseInt(args.offset) > env.OFFSET ? parseInt(args.offset) : env.OFFSET, diff --git a/package.json b/package.json index bf75d92..d2c3541 100644 --- a/package.json +++ b/package.json @@ -11,7 +11,8 @@ "lint": "eslint . --config .eslintrc.js", "lint:fix": "eslint . --fix --config .eslintrc.js", "format": "prettier . --write", - "format:test": "prettier ." 
+ "format:test": "prettier .", + "test": "vitest" }, "dependencies": { "@clack/prompts": "^0.7.0", @@ -32,6 +33,7 @@ "eslint": "^8.57.0", "eslint-config-prettier": "^9.1.0", "eslint-plugin-prettier": "^5.1.3", - "prettier": "^3.2.5" + "prettier": "^3.2.5", + "vitest": "^1.3.1" } } diff --git a/src/functions.test.ts b/src/functions.test.ts new file mode 100644 index 0000000..603af6d --- /dev/null +++ b/src/functions.test.ts @@ -0,0 +1,36 @@ +import { expect, test } from "vitest"; +import { loadUsersFromFile } from "./functions"; + +test("loadUsersFromFile", async () => { + const user = await loadUsersFromFile("/samples/clerk.csv", "clerk"); + + expect(user).toMatchInlineSnapshot(` + [ + { + "email": "janedoe@clerk.dev", + "firstName": "Jane", + "lastName": "Doe", + "passwordHasher": "bcrypt", + "userId": "user_2YDryYFVMM1W1plDDKz7Gzf4we6", + }, + { + "email": "johndoe@gmail.com", + "firstName": "John", + "lastName": "Doe", + "userId": "user_2ZZCgLE7kJG2CRBxTZ6YUIvzS10", + }, + { + "email": "johnhncock@clerk.dev", + "firstName": "John", + "lastName": "Hancock", + "userId": "user_2cWszPHuo6P2lCdnhhZbVMfbAIC", + }, + { + "email": "janehancock@clerk.dev", + "firstName": "Jane", + "lastName": "Hancock", + "userId": "user_2cukOsyNsh0J3MCEvrgM6PkoB0I", + }, + ] + `); +}); diff --git a/src/functions.ts b/src/functions.ts index 97c49a4..e4c43cc 100644 --- a/src/functions.ts +++ b/src/functions.ts @@ -39,12 +39,11 @@ export type OptionType = { label: string | undefined; hint?: string | undefined; }; - -// create a union of all keys in the transformer object -export type TransformKeys = keyof (typeof handlers)[number]; +// create union of string literals from handlers transformer object keys +export type HandlerMapKeys = (typeof handlers)[number]["key"]; // create a union of all transformer objects in handlers array -type KeyHandlerMap = (typeof handlers)[number]; +type HandlerMapUnion = (typeof handlers)[number]; // utility function to create file path const createImportFilePath = (file: string) => { @@ -70,7 +69,7 @@ export const getDateTimeStamp = () => { }; // transform incoming data datas to match default schema -export function transformKeys( +export function transformKeys( data: Record, keys: T, ): Record { @@ -90,7 +89,7 @@ export function transformKeys( const transformUsers = ( users: User[], - key: TransformKeys, + key: HandlerMapKeys, dateTime: string, ) => { // This applies to smaller numbers. Pass in 10, get 5 back. 
@@ -151,7 +150,7 @@ const addDefaultFields = (users: User[], key: string) => { export const loadUsersFromFile = async ( file: string, - key: TransformKeys, + key: HandlerMapKeys, ): Promise => { const dateTime = getDateTimeStamp(); s.start(); diff --git a/vitest.config.ts b/vitest.config.ts new file mode 100644 index 0000000..8fb6f2d --- /dev/null +++ b/vitest.config.ts @@ -0,0 +1,3 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({}); From b10b1ccd799187754ca18d1f7cf815335e15f6fc Mon Sep 17 00:00:00 2001 From: Jacob MG Evans Date: Tue, 5 Mar 2024 22:50:54 -0800 Subject: [PATCH 30/31] Added more tests for file types for loadUsers & added errorLogger test --- src/functions.test.ts | 68 ++++++++++++++++++++++++++++++++++++------- src/import-users.ts | 14 ++++----- src/logger.test.ts | 37 +++++++++++++++++++++++ 3 files changed, 102 insertions(+), 17 deletions(-) create mode 100644 src/logger.test.ts diff --git a/src/functions.test.ts b/src/functions.test.ts index 603af6d..a726b30 100644 --- a/src/functions.test.ts +++ b/src/functions.test.ts @@ -1,10 +1,27 @@ import { expect, test } from "vitest"; import { loadUsersFromFile } from "./functions"; -test("loadUsersFromFile", async () => { - const user = await loadUsersFromFile("/samples/clerk.csv", "clerk"); +test("loadUsersFromFile CSV", async () => { + const userClerk = await loadUsersFromFile("/samples/clerk.csv", "clerk"); + const userSupabase = await loadUsersFromFile( + "/samples/supabase.csv", + "clerk", + ); - expect(user).toMatchInlineSnapshot(` + expect(userSupabase).toMatchInlineSnapshot(` + [ + { + "email": "test@test.com", + "userId": "76b196c8-d5c4-4907-9746-ed06ef829a67", + }, + { + "email": "test2@test2.com", + "userId": "926f3b49-9687-4d05-8557-2673387a1f3c", + }, + ] + `); + + expect(userClerk.slice(0, 2)).toMatchInlineSnapshot(` [ { "email": "janedoe@clerk.dev", @@ -19,18 +36,49 @@ test("loadUsersFromFile", async () => { "lastName": "Doe", "userId": "user_2ZZCgLE7kJG2CRBxTZ6YUIvzS10", }, + ] + `); +}); + +test("loadUsersFromFile JSON", async () => { + const userAuthjs = await loadUsersFromFile("/samples/authjs.json", "clerk"); + const userSupabase = await loadUsersFromFile( + "/samples/supabase.json", + "clerk", + ); + const userAuth0 = await loadUsersFromFile("/samples/auth0.json", "clerk"); + + expect(userAuthjs.slice(0, 2)).toMatchInlineSnapshot(` + [ { - "email": "johnhncock@clerk.dev", + "email": "john@example.com", "firstName": "John", - "lastName": "Hancock", - "userId": "user_2cWszPHuo6P2lCdnhhZbVMfbAIC", + "lastName": "Doe", + "password": "$2a$12$9HhLqMJxqBKhlZasxjlhger67GFcC4aOAtpcU.THpcSLiQve4mq6.", + "passwordHasher": "bcrypt", + "userId": "1", }, { - "email": "janehancock@clerk.dev", - "firstName": "Jane", - "lastName": "Hancock", - "userId": "user_2cukOsyNsh0J3MCEvrgM6PkoB0I", + "email": "alice@example.com", + "firstName": "Alice", + "lastName": "Smith", + "password": "$2a$12$9HhLqMJxqBKhlZasxjlhger67GFcC4aOAtpcU.THpcSLiQve4mq6.", + "passwordHasher": "bcrypt", + "userId": "2", + }, + ] + `); + expect(userSupabase).toMatchInlineSnapshot(` + [ + { + "email": "janedoe@clerk.dev", + "userId": "2971a33d-5b7c-4c11-b8fe-61b7f185f211", + }, + { + "email": "johndoe@clerk.dev", + "userId": "2971a33d-5b7c-4c11-b8fe-61b7f185f234", }, ] `); + expect(userAuth0).toMatchInlineSnapshot(`[]`); }); diff --git a/src/import-users.ts b/src/import-users.ts index cace4aa..3a0a913 100644 --- a/src/import-users.ts +++ b/src/import-users.ts @@ -5,13 +5,13 @@ import * as p from "@clack/prompts"; import { 
errorLogger } from "./logger"; // TODO: This is likely not needed anymore -type CliArgs = { - key: string; - file: string; - instance: string; - offest?: string; - begin: boolean; -}; +// type CliArgs = { +// key: string; +// file: string; +// instance: string; +// offest?: string; +// begin: boolean; +// }; const s = p.spinner(); let migrated = 0; diff --git a/src/logger.test.ts b/src/logger.test.ts new file mode 100644 index 0000000..dd2ef41 --- /dev/null +++ b/src/logger.test.ts @@ -0,0 +1,37 @@ +import { expect, test } from "vitest"; +import { errorLogger } from "./logger"; +import { readFileSync, existsSync, rmdirSync } from "node:fs"; + +test("errorLogger", () => { + const dateTime = "fake-date-time"; + + errorLogger( + { + errors: [ + { + code: "1234", + message: "isolinear chip failed to initialize, in jeffries tube 32", + }, + ], + status: "error", + userId: "123", + }, + dateTime, + ); + + expect(readFileSync("logs/fake-date-time.json", "utf8")) + .toMatchInlineSnapshot(` + "[ + [ + { + "type": "User Creation Error", + "userId": "123", + "status": "error" + } + ] + ]" + `); + + existsSync("logs/fake-date-time.json") && + rmdirSync("logs", { recursive: true }); +}); From bb0a9275985ada4e94b0bd72815026ec772d44c3 Mon Sep 17 00:00:00 2001 From: Jeff Escalante Date: Fri, 26 Jul 2024 17:10:34 -0400 Subject: [PATCH 31/31] fix pathing for windows --- src/logger.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/logger.ts b/src/logger.ts index 9fed588..37b162f 100644 --- a/src/logger.ts +++ b/src/logger.ts @@ -38,20 +38,20 @@ function logger(payload: T, dateTime: string) { confirmOrCreateFolder(logPath); try { - if (!fs.existsSync(`${logPath}/${dateTime}.json`)) { + if (!fs.existsSync(`${logPath}${path.sep}${dateTime}.json`)) { const log = [payload]; fs.writeFileSync( - `${logPath}/${dateTime}.json`, + `${logPath}${path.sep}${dateTime}.json`, JSON.stringify(log, null, 2), ); } else { const log = JSON.parse( - fs.readFileSync(`${logPath}/${dateTime}.json`, "utf-8"), + fs.readFileSync(`${logPath}${path.sep}${dateTime}.json`, "utf-8"), ); log.push(payload); fs.writeFileSync( - `${logPath}/${dateTime}.json`, + `${logPath}${path.sep}${dateTime}.json`, JSON.stringify(log, null, 2), ); }
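The fix above threads path.sep through each template string. An equivalent route — shown as a sketch assuming only Node's path module, not as code from this patch — is to let path.join pick the separator:

import * as path from "node:path";

// path.join inserts the platform-appropriate separator itself.
const logFile = (logPath: string, dateTime: string) =>
  path.join(logPath, `${dateTime}.json`);

// logFile("logs", "2024-03-04T16:10:23")
// => "logs/2024-03-04T16:10:23.json" on POSIX
// => "logs\\2024-03-04T16:10:23.json" on Windows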