diff --git a/packages/core/src/bin/commands/createViews.ts b/packages/core/src/bin/commands/createViews.ts index 547d22af2..688a92eff 100644 --- a/packages/core/src/bin/commands/createViews.ts +++ b/packages/core/src/bin/commands/createViews.ts @@ -111,8 +111,20 @@ export async function createViews({ return; } + const database = createDatabase({ + common, + // Note: `namespace` is not used in this command + namespace: { + schema: cliOptions.schema!, + viewsSchema: undefined, + }, + preBuild: buildResult.result, + schemaBuild: emptySchemaBuild, + }); + const databaseDiagnostic = await build.databaseDiagnostic({ preBuild: buildResult.result, + database, }); if (databaseDiagnostic.status === "error") { common.logger.error({ @@ -124,17 +136,6 @@ export async function createViews({ return; } - const database = createDatabase({ - common, - // Note: `namespace` is not used in this command - namespace: { - schema: cliOptions.schema!, - viewsSchema: undefined, - }, - preBuild: buildResult.result, - schemaBuild: emptySchemaBuild, - }); - const endClock = startClock(); const schemaExists = await database.adminQB diff --git a/packages/core/src/bin/commands/dev.ts b/packages/core/src/bin/commands/dev.ts index 4f08388ac..373889886 100644 --- a/packages/core/src/bin/commands/dev.ts +++ b/packages/core/src/bin/commands/dev.ts @@ -3,7 +3,11 @@ import path from "node:path"; import { createBuild } from "@/build/index.js"; import { type Database, createDatabase } from "@/database/index.js"; import type { Common } from "@/internal/common.js"; -import { NonRetryableUserError, ShutdownError } from "@/internal/errors.js"; +import { + BaseError, + ShutdownError, + isUserDerivedError, +} from "@/internal/errors.js"; import { createLogger } from "@/internal/logger.js"; import { MetricsService } from "@/internal/metrics.js"; import { buildOptions } from "@/internal/options.js"; @@ -102,12 +106,6 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { } if (result.status === 
"error") { - if (isInitialBuild === false) { - common.logger.error({ - error: result.error, - }); - } - // This handles indexing function build failures on hot reload. metrics.hasError = true; return; @@ -162,23 +160,6 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { return; } - const databaseDiagnostic = await build.databaseDiagnostic({ - preBuild: preCompileResult.result, - }); - if (databaseDiagnostic.status === "error") { - common.logger.error({ - msg: "Build failed", - stage: "diagnostic", - error: databaseDiagnostic.error, - }); - buildQueue.add({ - status: "error", - kind: "indexing", - error: databaseDiagnostic.error, - }); - return; - } - const compileSchemaResult = build.compileSchema({ ...schemaResult.result, preBuild: preCompileResult.result, @@ -276,11 +257,40 @@ export async function dev({ cliOptions }: { cliOptions: CliOptions }) { preBuild: preCompileResult.result, schemaBuild: compileSchemaResult.result, }); - crashRecoveryCheckpoint = await database.migrate({ - buildId: indexingBuildResult.result.buildId, - chains: indexingBuildResult.result.chains, - finalizedBlocks: indexingBuildResult.result.finalizedBlocks, + + const databaseDiagnostic = await build.databaseDiagnostic({ + preBuild: preCompileResult.result, + database, }); + if (databaseDiagnostic.status === "error") { + common.logger.error({ + msg: "Build failed", + stage: "diagnostic", + error: databaseDiagnostic.error, + }); + buildQueue.add({ + status: "error", + kind: "indexing", + error: databaseDiagnostic.error, + }); + return; + } + + crashRecoveryCheckpoint = await database + .migrate({ + buildId: indexingBuildResult.result.buildId, + chains: indexingBuildResult.result.chains, + finalizedBlocks: indexingBuildResult.result.finalizedBlocks, + }) + .catch((error) => { + common.logger.error({ + msg: "Database migration failed", + stage: "migration", + error: error as Error, + }); + + throw error; + }); await database.migrateSync(); @@ -437,37 +447,31 @@ export 
async function dev({ cliOptions }: { cliOptions: CliOptions }) { process.on("uncaughtException", (error: Error) => { if (error instanceof ShutdownError) return; - if (error instanceof NonRetryableUserError) { - common.logger.error({ - msg: "uncaughtException", - error, - }); - - buildQueue.clear(); - buildQueue.add({ status: "error", kind: "indexing", error }); + if (error instanceof BaseError) { + common.logger.error({ msg: `uncaughtException: ${error.name}` }); + if (isUserDerivedError(error)) { + buildQueue.clear(); + buildQueue.add({ status: "error", kind: "indexing", error }); + } else { + exit({ code: 75 }); + } } else { - common.logger.error({ - msg: "uncaughtException", - error, - }); + common.logger.error({ msg: "uncaughtException", error }); exit({ code: 75 }); } }); process.on("unhandledRejection", (error: Error) => { if (error instanceof ShutdownError) return; - if (error instanceof NonRetryableUserError) { - common.logger.error({ - msg: "unhandledRejection", - error, - }); - - buildQueue.clear(); - buildQueue.add({ status: "error", kind: "indexing", error }); + if (error instanceof BaseError) { + common.logger.error({ msg: `unhandledRejection: ${error.name}` }); + if (isUserDerivedError(error)) { + buildQueue.clear(); + buildQueue.add({ status: "error", kind: "indexing", error }); + } else { + exit({ code: 75 }); + } } else { - common.logger.error({ - msg: "unhandledRejection", - error, - }); + common.logger.error({ msg: "unhandledRejection", error }); exit({ code: 75 }); } }); diff --git a/packages/core/src/bin/commands/list.ts b/packages/core/src/bin/commands/list.ts index ba77fd74e..148a6a09b 100644 --- a/packages/core/src/bin/commands/list.ts +++ b/packages/core/src/bin/commands/list.ts @@ -84,8 +84,17 @@ export async function list({ cliOptions }: { cliOptions: CliOptions }) { return; } + const database = createDatabase({ + common, + // Note: `namespace` is not used in this command + namespace: { schema: "public", viewsSchema: undefined }, + 
preBuild: buildResult.result, + schemaBuild: emptySchemaBuild, + }); + const databaseDiagnostic = await build.databaseDiagnostic({ preBuild: buildResult.result, + database, }); if (databaseDiagnostic.status === "error") { common.logger.error({ @@ -97,14 +106,6 @@ export async function list({ cliOptions }: { cliOptions: CliOptions }) { return; } - const database = createDatabase({ - common, - // Note: `namespace` is not used in this command - namespace: { schema: "public", viewsSchema: undefined }, - preBuild: buildResult.result, - schemaBuild: emptySchemaBuild, - }); - const ponderSchemas = await database.adminQB.wrap((db) => db .select({ schema: TABLES.table_schema }) diff --git a/packages/core/src/bin/commands/prune.ts b/packages/core/src/bin/commands/prune.ts index f1b35d4b9..e3b5f2762 100644 --- a/packages/core/src/bin/commands/prune.ts +++ b/packages/core/src/bin/commands/prune.ts @@ -91,8 +91,17 @@ export async function prune({ cliOptions }: { cliOptions: CliOptions }) { return; } + const database = createDatabase({ + common, + // Note: `namespace` is not used in this command + namespace: { schema: "public", viewsSchema: undefined }, + preBuild: buildResult.result, + schemaBuild: emptySchemaBuild, + }); + const databaseDiagnostic = await build.databaseDiagnostic({ preBuild: buildResult.result, + database, }); if (databaseDiagnostic.status === "error") { common.logger.error({ @@ -104,14 +113,6 @@ export async function prune({ cliOptions }: { cliOptions: CliOptions }) { return; } - const database = createDatabase({ - common, - // Note: `namespace` is not used in this command - namespace: { schema: "public", viewsSchema: undefined }, - preBuild: buildResult.result, - schemaBuild: emptySchemaBuild, - }); - const ponderSchemas = await database.adminQB.wrap((db) => db .select({ schema: TABLES.table_schema, tableCount: count() }) diff --git a/packages/core/src/bin/commands/serve.ts b/packages/core/src/bin/commands/serve.ts index 53e2328f0..4130de26e 100644 --- 
a/packages/core/src/bin/commands/serve.ts +++ b/packages/core/src/bin/commands/serve.ts @@ -113,19 +113,6 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) { return; } - const databaseDiagnostic = await build.databaseDiagnostic({ - preBuild: preCompileResult.result, - }); - if (databaseDiagnostic.status === "error") { - common.logger.error({ - msg: "Build failed", - stage: "diagnostic", - error: databaseDiagnostic.error, - }); - await exit({ code: 75 }); - return; - } - const compileSchemaResult = build.compileSchema({ ...schemaResult.result, preBuild: preCompileResult.result, @@ -164,6 +151,20 @@ export async function serve({ cliOptions }: { cliOptions: CliOptions }) { schemaBuild: compileSchemaResult.result, }); + const databaseDiagnostic = await build.databaseDiagnostic({ + preBuild: preCompileResult.result, + database, + }); + if (databaseDiagnostic.status === "error") { + common.logger.error({ + msg: "Build failed", + stage: "diagnostic", + error: databaseDiagnostic.error, + }); + await exit({ code: 75 }); + return; + } + const schemaExists = await database.adminQB .wrap((db) => db diff --git a/packages/core/src/bin/commands/start.ts b/packages/core/src/bin/commands/start.ts index 5bffae936..16c7140cb 100644 --- a/packages/core/src/bin/commands/start.ts +++ b/packages/core/src/bin/commands/start.ts @@ -137,19 +137,6 @@ export async function start({ return; } - const databaseDiagnostic = await build.databaseDiagnostic({ - preBuild: preCompileResult.result, - }); - if (databaseDiagnostic.status === "error") { - common.logger.error({ - msg: "Build failed", - stage: "diagnostic", - error: databaseDiagnostic.error, - }); - await exit({ code: 75 }); - return; - } - const compileSchemaResult = build.compileSchema({ ...schemaResult.result, preBuild: preCompileResult.result, @@ -225,11 +212,36 @@ export async function start({ preBuild: preCompileResult.result, schemaBuild: compileSchemaResult.result, }); - const crashRecoveryCheckpoint = await 
database.migrate({ - buildId: indexingBuildResult.result.buildId, - chains: indexingBuildResult.result.chains, - finalizedBlocks: indexingBuildResult.result.finalizedBlocks, + + const databaseDiagnostic = await build.databaseDiagnostic({ + preBuild: preCompileResult.result, + database, }); + if (databaseDiagnostic.status === "error") { + common.logger.error({ + msg: "Build failed", + stage: "diagnostic", + error: databaseDiagnostic.error, + }); + await exit({ code: 75 }); + return; + } + + const crashRecoveryCheckpoint = await database + .migrate({ + buildId: indexingBuildResult.result.buildId, + chains: indexingBuildResult.result.chains, + finalizedBlocks: indexingBuildResult.result.finalizedBlocks, + }) + .catch((error) => { + common.logger.error({ + msg: "Database migration failed", + stage: "migration", + error: error as Error, + }); + + throw error; + }); await database.migrateSync(); diff --git a/packages/core/src/bin/isolatedController.ts b/packages/core/src/bin/isolatedController.ts index f744d1935..99c1b66f3 100644 --- a/packages/core/src/bin/isolatedController.ts +++ b/packages/core/src/bin/isolatedController.ts @@ -5,10 +5,7 @@ import { Worker } from "node:worker_threads"; import { createIndexes, createViews } from "@/database/actions.js"; import { type Database, getPonderMetaTable } from "@/database/index.js"; import type { Common } from "@/internal/common.js"; -import { - NonRetryableUserError, - nonRetryableUserErrorNames, -} from "@/internal/errors.js"; +import {} from "@/internal/errors.js"; import { AggregateMetricsService, getAppProgress } from "@/internal/metrics.js"; import type { CrashRecoveryCheckpoint, @@ -248,33 +245,18 @@ export async function isolatedController({ break; } case "error": { - let error: Error; - if (nonRetryableUserErrorNames.includes(message.error.name)) { - error = new NonRetryableUserError(message.error.message); - } else { - error = new Error(message.error.message); - } - error.name = message.error.name; - error.stack = 
message.error.stack; - throw error; + throw message.error; } } }, ); worker.on("error", (error: Error) => { - if (nonRetryableUserErrorNames.includes(error.name)) { - error = new NonRetryableUserError(error.message); - } else { - error = new Error(error.message); - } throw error; }); worker.on("exit", (code: number) => { - const error = new Error(`Worker thread exited with code ${code}.`); - error.stack = undefined; - throw error; + throw new Error(`Worker thread exited with code ${code}.`); }); perThreadWorkers.push(worker); diff --git a/packages/core/src/bin/utils/exit.ts b/packages/core/src/bin/utils/exit.ts index b7f1b1a35..f341ce87e 100644 --- a/packages/core/src/bin/utils/exit.ts +++ b/packages/core/src/bin/utils/exit.ts @@ -1,7 +1,11 @@ import os from "node:os"; import readline from "node:readline"; import type { Common } from "@/internal/common.js"; -import { NonRetryableUserError, ShutdownError } from "@/internal/errors.js"; +import { + BaseError, + ShutdownError, + isUserDerivedError, +} from "@/internal/errors.js"; import type { Options } from "@/internal/options.js"; const SHUTDOWN_GRACE_PERIOD_MS = 5_000; @@ -84,25 +88,39 @@ export const createExit = ({ if (options.command !== "dev") { process.on("uncaughtException", (error: Error) => { if (error instanceof ShutdownError) return; - common.logger.error({ - msg: "uncaughtException", - error, - }); - if (error instanceof NonRetryableUserError) { - exit({ code: 1 }); + if (error instanceof BaseError) { + common.logger.error({ + msg: `uncaughtException: ${error.name} ${error.message}`, + }); + if (isUserDerivedError(error)) { + exit({ code: 1 }); + } else { + exit({ code: 75 }); + } } else { + common.logger.error({ + msg: "uncaughtException", + error, + }); exit({ code: 75 }); } }); process.on("unhandledRejection", (error: Error) => { if (error instanceof ShutdownError) return; - common.logger.error({ - msg: "unhandledRejection", - error, - }); - if (error instanceof NonRetryableUserError) { - exit({ 
code: 1 }); + if (error instanceof BaseError) { + common.logger.error({ + msg: `unhandledRejection: ${error.name} ${error.message}`, + }); + if (isUserDerivedError(error)) { + exit({ code: 1 }); + } else { + exit({ code: 75 }); + } } else { + common.logger.error({ + msg: "unhandledRejection", + error, + }); exit({ code: 75 }); } }); diff --git a/packages/core/src/build/config.test.ts b/packages/core/src/build/config.test.ts index 83a884e45..735f9fdc1 100644 --- a/packages/core/src/build/config.test.ts +++ b/packages/core/src/build/config.test.ts @@ -12,12 +12,7 @@ import { zeroAddress, } from "viem"; import { beforeEach, expect, test } from "vitest"; -import { - buildConfig, - buildIndexingFunctions, - safeBuildConfig, - safeBuildIndexingFunctions, -} from "./config.js"; +import { buildConfig, buildIndexingFunctions } from "./config.js"; beforeEach(setupCommon); beforeEach(setupAnvil); @@ -209,16 +204,15 @@ test("buildIndexingFunctions() throw useful error for common 0.11 migration mist config, }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - // @ts-expect-error - config, - indexingFunctions, - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + // @ts-expect-error + config, + indexingFunctions, + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Chain for 'a' is null or undefined. Expected one of ['mainnet', 'optimism']. 
Did you forget to change 'network' to 'chain' when migrating to 0.11?", ); }); @@ -402,15 +396,14 @@ test("buildIndexingFunctions() validates chain name", async () => { common: context.common, config, }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Invalid chain for 'a'. Got 'mainnetz', expected one of ['mainnet'].", ); }); @@ -430,13 +423,12 @@ test.skip("buildConfig() warns for public RPC URL", () => { }, }); - const result = safeBuildConfig({ + const { logs } = buildConfig({ common: context.common, config, }); - expect(result.status).toBe("success"); - expect(result.logs!.filter((l) => l.level === "warn")).toEqual([ + expect(logs!.filter((l) => l.level === "warn")).toEqual([ { level: "warn", msg: "Chain 'mainnet' is using a public RPC URL (https://cloudflare-eth.com). 
Most apps require an RPC URL with a higher rate limit.", @@ -458,12 +450,7 @@ test("buildConfig() handles chains not found in viem", () => { }, }); - const result = safeBuildConfig({ - common: context.common, - config, - }); - - expect(result.status).toBe("success"); + buildConfig({ common: context.common, config }); }); test("buildIndexingFunctions() validates event filter event name must be present in ABI", async () => { @@ -490,15 +477,14 @@ test("buildIndexingFunctions() validates event filter event name must be present common: context.common, config, }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Invalid filter for contract 'a'. Got event name 'Event2', expected one of ['Event0'].", ); }); @@ -522,15 +508,14 @@ test("buildIndexingFunctions() validates address empty string", async () => { config, }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Invalid prefix for address ''. 
Got '', expected '0x'.", ); }); @@ -551,15 +536,14 @@ test("buildIndexingFunctions() validates address prefix", async () => { }); const configBuild = buildConfig({ common: context.common, config }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Invalid prefix for address '0b0000000000000000000000000000000000000001'. Got '0b', expected '0x'.", ); }); @@ -579,15 +563,14 @@ test("buildIndexingFunctions() validates address length", async () => { }); const configBuild = buildConfig({ common: context.common, config }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Invalid length for address '0x000000000001'. 
Got 14, expected 42 characters.", ); }); @@ -923,15 +906,14 @@ test("buildIndexingFunctions() validates factory interval", async () => { }); const configBuild = buildConfig({ common: context.common, config }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result.status).toBe("error"); - expect(result.error?.message).toBe( + await expect( + buildIndexingFunctions({ + common: context.common, + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( "Validation failed: Start block for 'a' is before start block of factory address (16370050 > 16370000).", ); }); @@ -955,20 +937,17 @@ test("buildIndexingFunctions() validates start and end block", async () => { // @ts-expect-error const configBuild = buildConfig({ common: context.common, config }); - const result = await safeBuildIndexingFunctions({ - common: context.common, - // @ts-expect-error - config, - indexingFunctions: [{ name: "a:Event0", fn: () => {} }], - configBuild, - }); - - expect(result).toMatchInlineSnapshot(` - { - "error": [BuildError: Validation failed: Invalid start block for 'a'. Got 16370000 typeof string, expected an integer.], - "status": "error", - } - `); + await expect( + buildIndexingFunctions({ + common: context.common, + // @ts-expect-error + config, + indexingFunctions: [{ name: "a:Event0", fn: () => {} }], + configBuild, + }), + ).rejects.toThrowError( + "Validation failed: Invalid start block for 'a'. 
Got 16370000 typeof string, expected an integer.", + ); }); test("buildIndexingFunctions() returns chain, rpc, and finalized block", async () => { diff --git a/packages/core/src/build/config.ts b/packages/core/src/build/config.ts index f640198d5..5a2f930f3 100644 --- a/packages/core/src/build/config.ts +++ b/packages/core/src/build/config.ts @@ -144,9 +144,10 @@ export async function buildIndexingFunctions({ throw new BlockNotFoundError({ blockNumber: "latest" as any }); return hexToNumber((block as SyncBlock).number); }) - .catch((e) => { - throw new Error( - `Unable to fetch "latest" block for chain '${chain.name}':\n${e.message}`, + .catch((_error) => { + throw new BuildError( + `Unable to fetch "latest" block for chain '${chain.name}'`, + { cause: _error }, ); }); perChainLatestBlockNumber.set(chain.name, blockPromise); @@ -165,9 +166,10 @@ export async function buildIndexingFunctions({ retryNullBlockRequest: true, }) .then((block) => hexToNumber((block as SyncBlock).number)) - .catch((e) => { - throw new Error( - `Unable to fetch "latest" block for chain '${chain.name}':\n${e.message}`, + .catch((_error) => { + throw new BuildError( + `Unable to fetch "latest" block for chain '${chain.name}'`, + { cause: _error }, ); }); @@ -195,7 +197,7 @@ export async function buildIndexingFunctions({ ...Object.keys(config.blocks ?? {}), ]) { if (sourceNames.has(source)) { - throw new Error( + throw new BuildError( `Validation failed: Duplicate name '${source}' not allowed. 
The name must be unique across blocks, contracts, and accounts.`, ); } @@ -204,7 +206,7 @@ export async function buildIndexingFunctions({ // Validate and build indexing functions if (indexingFunctions.length === 0) { - throw new Error( + throw new BuildError( "Validation failed: Found 0 registered indexing functions.", ); } @@ -219,7 +221,7 @@ export async function buildIndexingFunctions({ const [sourceName] = eventNameComponents; if (!sourceName) { - throw new Error( + throw new BuildError( `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, ); } @@ -231,7 +233,7 @@ export async function buildIndexingFunctions({ (sourceType !== "transaction" && sourceType !== "transfer") || (fromOrTo !== "from" && fromOrTo !== "to") ) { - throw new Error( + throw new BuildError( `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:transaction:from', '{sourceName}:transaction:to', '{sourceName}:transfer:from', or '{sourceName}:transfer:to'.`, ); } @@ -239,18 +241,18 @@ export async function buildIndexingFunctions({ const [, sourceEventName] = eventNameComponents; if (!sourceEventName) { - throw new Error( + throw new BuildError( `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, ); } } else { - throw new Error( + throw new BuildError( `Validation failed: Invalid event '${eventName}', expected format '{sourceName}:{eventName}' or '{sourceName}.{functionName}'.`, ); } if (eventNames.has(eventName)) { - throw new Error( + throw new BuildError( `Validation failed: Multiple indexing functions registered for event '${eventName}'.`, ); } @@ -265,7 +267,7 @@ export async function buildIndexingFunctions({ }).find((_sourceName) => _sourceName === sourceName); if (!matchedSourceName) { - throw new Error( + throw new BuildError( `Validation failed: Invalid event '${eventName}' uses an unrecognized contract, 
account, or block interval name. Expected one of [${Array.from( sourceNames, ) @@ -282,7 +284,7 @@ export async function buildIndexingFunctions({ ...flattenSources(config.blocks ?? {}), ]) { if (source.chain === null || source.chain === undefined) { - throw new Error( + throw new BuildError( `Validation failed: Chain for '${source.name}' is null or undefined. Expected one of [${chains .map((n) => `'${n.name}'`) .join( @@ -293,7 +295,7 @@ export async function buildIndexingFunctions({ const chain = chains.find((n) => n.name === source.chain); if (!chain) { - throw new Error( + throw new BuildError( `Validation failed: Invalid chain for '${ source.name }'. Got '${source.chain}', expected one of [${chains @@ -310,19 +312,19 @@ export async function buildIndexingFunctions({ endBlock !== undefined && endBlock < startBlock ) { - throw new Error( + throw new BuildError( `Validation failed: Start block for '${source.name}' is after end block (${startBlock} > ${endBlock}).`, ); } if (startBlock !== undefined && Number.isInteger(startBlock) === false) { - throw new Error( + throw new BuildError( `Validation failed: Invalid start block for '${source.name}'. Got ${startBlock} typeof ${typeof startBlock}, expected an integer.`, ); } if (endBlock !== undefined && Number.isInteger(endBlock) === false) { - throw new Error( + throw new BuildError( `Validation failed: Invalid end block for '${source.name}'. 
Got ${endBlock} typeof ${typeof endBlock}, expected an integer.`, ); } @@ -343,7 +345,7 @@ export async function buildIndexingFunctions({ factoryStartBlock !== undefined && (startBlock === undefined || factoryStartBlock > startBlock) ) { - throw new Error( + throw new BuildError( `Validation failed: Start block for '${source.name}' is before start block of factory address (${factoryStartBlock} > ${startBlock}).`, ); } @@ -352,7 +354,7 @@ export async function buildIndexingFunctions({ endBlock !== undefined && (factoryEndBlock === undefined || factoryEndBlock > endBlock) ) { - throw new Error( + throw new BuildError( `Validation failed: End block for ${source.name} is before end block of factory address (${factoryEndBlock} > ${endBlock}).`, ); } @@ -362,7 +364,7 @@ export async function buildIndexingFunctions({ factoryEndBlock !== undefined && factoryEndBlock < factoryStartBlock ) { - throw new Error( + throw new BuildError( `Validation failed: Start block for '${source.name}' factory address is after end block (${factoryStartBlock} > ${factoryEndBlock}).`, ); } @@ -437,14 +439,14 @@ export async function buildIndexingFunctions({ ? resolvedAddress : [resolvedAddress as Address]) { if (!address!.startsWith("0x")) - throw new Error( + throw new BuildError( `Validation failed: Invalid prefix for address '${address}'. Got '${address!.slice( 0, 2, )}', expected '0x'.`, ); if (address!.length !== 42) - throw new Error( + throw new BuildError( `Validation failed: Invalid length for address '${address}'. Got ${address!.length}, expected 42 characters.`, ); } @@ -483,7 +485,7 @@ export async function buildIndexingFunctions({ toSafeName({ abi: source.abi, item }) === filter.event, ); if (!abiEvent) { - throw new Error( + throw new BuildError( `Validation failed: Invalid filter for contract '${ source.name }'. 
Got event name '${filter.event}', expected one of [${source.abi @@ -505,7 +507,7 @@ export async function buildIndexingFunctions({ ); if (indexingFunction === undefined) { - throw new Error( + throw new BuildError( `Validation failed: Event selector '${toSafeName({ abi: source.abi, item: abiItem })}' is used in a filter but does not have a corresponding indexing function.`, ); } @@ -551,7 +553,7 @@ export async function buildIndexingFunctions({ toSafeName({ abi: source.abi, item }) === logEventName, ); if (abiEvent === undefined) { - throw new Error( + throw new BuildError( `Validation failed: Event name for event '${logEventName}' not found in the contract ABI. Got '${logEventName}', expected one of [${source.abi .filter((item): item is AbiEvent => item.type === "event") .map((item) => `'${toSafeName({ abi: source.abi, item })}'`) @@ -625,7 +627,7 @@ export async function buildIndexingFunctions({ toSafeName({ abi: source.abi, item }) === functionEventName, ); if (abiFunction === undefined) { - throw new Error( + throw new BuildError( `Validation failed: Function name for function '${functionEventName}' not found in the contract ABI. Got '${functionEventName}', expected one of [${source.abi .filter((item): item is AbiFunction => item.type === "function") .map((item) => `'${toSafeName({ abi: source.abi, item })}'`) @@ -697,7 +699,7 @@ export async function buildIndexingFunctions({ const resolvedAddress = source?.address; if (resolvedAddress === undefined) { - throw new Error( + throw new BuildError( `Validation failed: Account '${source.name}' must specify an 'address'.`, ); } @@ -730,14 +732,14 @@ export async function buildIndexingFunctions({ ? resolvedAddress : [resolvedAddress]) { if (!address!.startsWith("0x")) - throw new Error( + throw new BuildError( `Validation failed: Invalid prefix for address '${address}'. 
Got '${address!.slice( 0, 2, )}', expected '0x'.`, ); if (address!.length !== 42) - throw new Error( + throw new BuildError( `Validation failed: Invalid length for address '${address}'. Got ${address!.length}, expected 42 characters.`, ); } @@ -868,7 +870,7 @@ export async function buildIndexingFunctions({ const interval = Number.isNaN(intervalMaybeNan) ? 0 : intervalMaybeNan; if (!Number.isInteger(interval) || interval === 0) { - throw new Error( + throw new BuildError( `Validation failed: Invalid interval for block interval '${source.name}'. Got ${interval}, expected a non-zero integer.`, ); } @@ -947,7 +949,7 @@ export async function buildIndexingFunctions({ } if (chainsWithSources.length === 0) { - throw new Error( + throw new BuildError( "Validation failed: Found 0 chains with registered indexing functions.", ); } @@ -982,7 +984,7 @@ export function buildConfig({ const chains: Chain[] = Object.entries(config.chains).map( ([chainName, chain]) => { if (chain.id > Number.MAX_SAFE_INTEGER) { - throw new Error( + throw new BuildError( `Chain "${chainName}" with id ${chain.id} has a chain_id that is too large.`, ); } @@ -996,7 +998,7 @@ export function buildConfig({ if (chain.rpc === undefined || chain.rpc === "") { if (matchedChain === undefined) { - throw new Error( + throw new BuildError( `Chain "${chainName}" with id ${chain.id} has no RPC defined and no default RPC URL was found in 'viem/chains'.`, ); } @@ -1008,7 +1010,7 @@ export function buildConfig({ const rpcs = Array.isArray(chain.rpc) ? chain.rpc : [chain.rpc]; if (rpcs.length === 0) { - throw new Error( + throw new BuildError( `Chain "${chainName}" with id ${chain.id} has no RPC URLs.`, ); } @@ -1044,7 +1046,7 @@ export function buildConfig({ } if (chain.pollingInterval !== undefined && chain.pollingInterval! < 100) { - throw new Error( + throw new BuildError( `Invalid 'pollingInterval' for chain '${chainName}. 
Expected 100 milliseconds or greater, got ${chain.pollingInterval} milliseconds.`, ); } @@ -1066,7 +1068,7 @@ export function buildConfig({ const chainIds = new Set(); for (const chain of chains) { if (chainIds.has(chain.id)) { - throw new Error( + throw new BuildError( `Invalid id for chain "${chain.name}". ${chain.id} is already in use.`, ); } @@ -1083,59 +1085,3 @@ export function buildConfig({ return { chains, rpcs, logs }; } - -export async function safeBuildIndexingFunctions({ - common, - config, - indexingFunctions, - configBuild, -}: { - common: Common; - config: Config; - indexingFunctions: IndexingFunctions; - configBuild: Pick; -}) { - try { - const result = await buildIndexingFunctions({ - common, - config, - indexingFunctions, - configBuild, - }); - - return { - status: "success", - chains: result.chains, - rpcs: result.rpcs, - finalizedBlocks: result.finalizedBlocks, - eventCallbacks: result.eventCallbacks, - setupCallbacks: result.setupCallbacks, - contracts: result.contracts, - logs: result.logs, - } as const; - } catch (_error) { - const buildError = new BuildError((_error as Error).message); - buildError.stack = undefined; - return { status: "error", error: buildError } as const; - } -} - -export function safeBuildConfig({ - common, - config, -}: { common: Common; config: Config }) { - try { - const result = buildConfig({ common, config }); - - return { - status: "success", - chains: result.chains, - rpcs: result.rpcs, - logs: result.logs, - } as const; - } catch (_error) { - const buildError = new BuildError((_error as Error).message); - buildError.stack = undefined; - return { status: "error", error: buildError } as const; - } -} diff --git a/packages/core/src/build/index.ts b/packages/core/src/build/index.ts index db285ac0f..1c26050d9 100644 --- a/packages/core/src/build/index.ts +++ b/packages/core/src/build/index.ts @@ -4,14 +4,9 @@ import path from "node:path"; import type { CliOptions } from "@/bin/ponder.js"; import type { Config } from 
"@/config/index.js"; import type { Database } from "@/database/index.js"; -import { createQB } from "@/database/queryBuilder.js"; import { MAX_DATABASE_OBJECT_NAME_LENGTH } from "@/drizzle/onchain.js"; import type { Common } from "@/internal/common.js"; -import { - BuildError, - NonRetryableUserError, - RetryableError, -} from "@/internal/errors.js"; +import { BuildError, ExecuteFileError } from "@/internal/errors.js"; import type { ApiBuild, IndexingBuild, @@ -21,13 +16,10 @@ import type { Schema, SchemaBuild, } from "@/internal/types.js"; -import { createPool, getDatabaseName } from "@/utils/pg.js"; -import { createPglite } from "@/utils/pglite.js"; +import { getDatabaseName } from "@/utils/pg.js"; import { getNextAvailablePort } from "@/utils/port.js"; import type { Result } from "@/utils/result.js"; import { startClock } from "@/utils/timer.js"; -import { drizzle as drizzleNodePostgres } from "drizzle-orm/node-postgres"; -import { drizzle as drizzlePglite } from "drizzle-orm/pglite"; import { glob } from "glob"; import { Hono } from "hono"; import superjson from "superjson"; @@ -38,10 +30,10 @@ import { ViteNodeServer } from "vite-node/server"; import { installSourcemapsSupport } from "vite-node/source-map"; import { normalizeModuleId, toFilePath } from "vite-node/utils"; import viteTsconfigPathsPlugin from "vite-tsconfig-paths"; -import { safeBuildConfig, safeBuildIndexingFunctions } from "./config.js"; +import { buildConfig, buildIndexingFunctions } from "./config.js"; import { vitePluginPonder } from "./plugin.js"; -import { safeBuildPre } from "./pre.js"; -import { safeBuildSchema } from "./schema.js"; +import { buildPre } from "./pre.js"; +import { buildSchema } from "./schema.js"; import { parseViteNodeError } from "./stacktrace.js"; declare global { @@ -93,7 +85,10 @@ export type Build = { rpcDiagnostic: (params: { configBuild: Pick; }) => Promise>; - databaseDiagnostic: (params: { preBuild: PreBuild }) => Promise>; + databaseDiagnostic: (params: { + 
preBuild: Pick; + database: Database; + }) => Promise>; }; export const createBuild = async ({ @@ -183,7 +178,11 @@ export const createBuild = async ({ return { status: "success", exports } as const; } catch (error_) { const relativePath = path.relative(common.options.rootDir, file); - const error = parseViteNodeError(relativePath, error_ as Error); + const viteError = parseViteNodeError(relativePath, error_ as Error); + const error = new ExecuteFileError(undefined, { + cause: viteError, + }); + error.stack = viteError.stack; return { status: "error", error } as const; } }; @@ -194,40 +193,34 @@ export const createBuild = async ({ { status: "success"; exports: any } | { status: "error"; error: Error } > => { let timeoutId: ReturnType; - const timeout = new Promise((resolve) => { + const timeout = new Promise((resolve) => { timeoutId = setTimeout( () => resolve( - new NonRetryableUserError( - "File execution did not complete (waited 10s)", + new BuildError( + `Executing file "${path.relative(common.options.rootDir, file)}" took longer than 10 seconds`, ), ), 10_000, ); }); - const res = await Promise.race([executeFile({ file }), timeout]); - if (res instanceof NonRetryableUserError) { - return { status: "error", error: res }; + const result = await Promise.race([executeFile({ file }), timeout]); + if (result instanceof BuildError) { + return { status: "error", error: result }; } clearTimeout(timeoutId!); - return res; + return result; }; const build = { async executeConfig(): Promise { - const executeResult = await executeFile({ + const executeResult = await executeFileWithTimeout({ file: common.options.configFile, }); if (executeResult.status === "error") { - common.logger.error({ - msg: "Error while executing file", - file: "ponder.config.ts", - error: executeResult.error, - }); - return executeResult; } @@ -250,17 +243,11 @@ export const createBuild = async ({ } as const; }, async executeSchema(): Promise { - const executeResult = await executeFile({ + const 
executeResult = await executeFileWithTimeout({ file: common.options.schemaFile, }); if (executeResult.status === "error") { - common.logger.error({ - msg: "Error while executing file", - file: "ponder.schema.ts", - error: executeResult.error, - }); - return executeResult; } @@ -284,11 +271,6 @@ export const createBuild = async ({ const executeResult = await executeFileWithTimeout({ file }); if (executeResult.status === "error") { - common.logger.error({ - msg: "Error while executing file", - file: path.relative(common.options.rootDir, file), - error: executeResult.error, - }); return executeResult; } } @@ -300,11 +282,8 @@ export const createBuild = async ({ try { const contents = fs.readFileSync(file, "utf-8"); hash.update(contents); - } catch (e) { - common.logger.warn({ - msg: "Unable to read file", - file, - }); + } catch { + common.logger.warn({ msg: "Unable to read file", file }); hash.update(file); } } @@ -325,26 +304,19 @@ export const createBuild = async ({ globalThis.PONDER_INDEXING_BUILD = configBuild; globalThis.PONDER_DATABASE = database; - if (!fs.existsSync(common.options.apiFile)) { + if (fs.existsSync(common.options.apiFile) === false) { const error = new BuildError( `API endpoint file not found. Create a file at ${common.options.apiFile}. Read more: https://ponder.sh/docs/api-reference/ponder/api-endpoints`, ); - error.stack = undefined; return { status: "error", error }; } - const executeResult = await executeFile({ + const executeResult = await executeFileWithTimeout({ file: common.options.apiFile, }); if (executeResult.status === "error") { - common.logger.error({ - msg: "Error while executing file", - file: path.relative(common.options.rootDir, common.options.apiFile), - error: executeResult.error, - }); - return executeResult; } @@ -354,7 +326,6 @@ export const createBuild = async ({ const error = new BuildError( "API endpoint file does not export a Hono instance as the default export. 
Read more: https://ponder.sh/docs/api-reference/ponder/api-endpoints", ); - error.stack = undefined; return { status: "error", error }; } @@ -372,7 +343,6 @@ export const createBuild = async ({ const error = new BuildError( `Database schema required. Specify with "DATABASE_SCHEMA" env var or "--schema" CLI flag. Read more: https://ponder.sh/docs/database#database-schema`, ); - error.stack = undefined; return { status: "error", error } as const; } @@ -385,7 +355,6 @@ export const createBuild = async ({ const error = new BuildError( "Views schema cannot be the same as the schema.", ); - error.stack = undefined; return { status: "error", error } as const; } @@ -393,7 +362,6 @@ export const createBuild = async ({ const error = new BuildError( `Invalid schema name. "ponder_sync" is a reserved schema name.`, ); - error.stack = undefined; return { status: "error", error } as const; } @@ -401,7 +369,6 @@ export const createBuild = async ({ const error = new BuildError( `Invalid views schema name. 
"ponder_sync" is a reserved schema name.`, ); - error.stack = undefined; return { status: "error", error } as const; } @@ -409,7 +376,6 @@ export const createBuild = async ({ const error = new BuildError( `Schema name cannot be longer than ${MAX_DATABASE_OBJECT_NAME_LENGTH} characters.`, ); - error.stack = undefined; return { status: "error", error } as const; } @@ -417,7 +383,6 @@ export const createBuild = async ({ const error = new BuildError( `Views schema name cannot be longer than ${MAX_DATABASE_OBJECT_NAME_LENGTH} characters.`, ); - error.stack = undefined; return { status: "error", error } as const; } @@ -429,60 +394,58 @@ export const createBuild = async ({ } as const; }, preCompile({ config }): Result { - const preBuild = safeBuildPre({ - config, - options: common.options, - logger: common.logger, - }); - if (preBuild.status === "error") { - return preBuild; - } + try { + const preBuild = buildPre({ + config, + options: common.options, + logger: common.logger, + }); - return { - status: "success", - result: { - databaseConfig: preBuild.databaseConfig, - ordering: preBuild.ordering, - }, - } as const; + return { + status: "success", + result: { + databaseConfig: preBuild.databaseConfig, + ordering: preBuild.ordering, + }, + } as const; + } catch (error) { + return { status: "error", error: error as Error } as const; + } }, compileSchema({ schema, preBuild }) { - const buildSchemaResult = safeBuildSchema({ schema, preBuild }); - - if (buildSchemaResult.status === "error") { - return buildSchemaResult; + try { + const { statements } = buildSchema({ schema, preBuild }); + + return { + status: "success", + result: { schema, statements }, + } as const; + } catch (error) { + return { status: "error", error: error as Error } as const; } - - return { - status: "success", - result: { - schema, - statements: buildSchemaResult.statements, - }, - } as const; }, compileConfig({ configResult }) { - // Validates and builds the config - const buildConfigResult = 
safeBuildConfig({ - common, - config: configResult.config, - }); - if (buildConfigResult.status === "error") { - return buildConfigResult; - } + try { + const buildConfigResult = buildConfig({ + common, + config: configResult.config, + }); - for (const log of buildConfigResult.logs) { - const { level, ...rest } = log; - common.logger[level](rest); - } + for (const log of buildConfigResult.logs) { + const { level, ...rest } = log; + common.logger[level](rest); + } - return { - status: "success", - result: { - chains: buildConfigResult.chains, - rpcs: buildConfigResult.rpcs, - }, - } as const; + return { + status: "success", + result: { + chains: buildConfigResult.chains, + rpcs: buildConfigResult.rpcs, + }, + } as const; + } catch (error) { + return { status: "error", error: error as Error } as const; + } }, async compileIndexing({ configResult, @@ -490,43 +453,43 @@ export const createBuild = async ({ indexingResult, configBuild, }) { - // Validates and builds the config - const buildIndexingFunctionsResult = await safeBuildIndexingFunctions({ - common, - config: configResult.config, - indexingFunctions: indexingResult.indexingFunctions, - configBuild, - }); - if (buildIndexingFunctionsResult.status === "error") { - return buildIndexingFunctionsResult; - } - - for (const log of buildIndexingFunctionsResult.logs) { - const { level, ...rest } = log; - common.logger[level](rest); - } + try { + const buildIndexingFunctionsResult = await buildIndexingFunctions({ + common, + config: configResult.config, + indexingFunctions: indexingResult.indexingFunctions, + configBuild, + }); - const buildId = createHash("sha256") - .update(BUILD_ID_VERSION) - .update(configResult.contentHash) - .update(schemaResult.contentHash) - .update(indexingResult.contentHash) - .digest("hex") - .slice(0, 10); + for (const log of buildIndexingFunctionsResult.logs) { + const { level, ...rest } = log; + common.logger[level](rest); + } - return { - status: "success", - result: { - buildId, - chains: 
buildIndexingFunctionsResult.chains, - rpcs: buildIndexingFunctionsResult.rpcs, - finalizedBlocks: buildIndexingFunctionsResult.finalizedBlocks, - eventCallbacks: buildIndexingFunctionsResult.eventCallbacks, - setupCallbacks: buildIndexingFunctionsResult.setupCallbacks, - contracts: buildIndexingFunctionsResult.contracts, - indexingFunctions: indexingResult.indexingFunctions, - }, - } as const; + const buildId = createHash("sha256") + .update(BUILD_ID_VERSION) + .update(configResult.contentHash) + .update(schemaResult.contentHash) + .update(indexingResult.contentHash) + .digest("hex") + .slice(0, 10); + + return { + status: "success", + result: { + buildId, + chains: buildIndexingFunctionsResult.chains, + rpcs: buildIndexingFunctionsResult.rpcs, + finalizedBlocks: buildIndexingFunctionsResult.finalizedBlocks, + eventCallbacks: buildIndexingFunctionsResult.eventCallbacks, + setupCallbacks: buildIndexingFunctionsResult.setupCallbacks, + contracts: buildIndexingFunctionsResult.contracts, + indexingFunctions: indexingResult.indexingFunctions, + }, + } as const; + } catch (error) { + return { status: "error", error: error as Error } as const; + } }, async compileApi({ apiResult }) { for (const route of apiResult.app.routes) { @@ -535,13 +498,11 @@ export const createBuild = async ({ route.path === "/ready" || route.path === "/status" || route.path === "/metrics" || - route.path === "/health" || - route.path === "/client" + route.path === "/health" ) { const error = new BuildError( `Validation failed: API route "${route.path}" is reserved for internal use.`, ); - error.stack = undefined; return { status: "error", error } as const; } } @@ -691,9 +652,10 @@ export const createBuild = async ({ rpc_chain_id: hexToNumber(chainId), }); } - } catch (e) { - const error = new RetryableError("Failed to connect to JSON-RPC"); - error.stack = undefined; + } catch (_error) { + const error = new BuildError("Failed to connect to JSON-RPC", { + cause: _error as Error, + }); return { 
status: "error", error } as const; } @@ -717,67 +679,43 @@ export const createBuild = async ({ return { status: "success", result: undefined }; }, - async databaseDiagnostic({ preBuild }) { + async databaseDiagnostic({ preBuild, database }) { const context = { logger: common.logger.child({ action: "database_diagnostic" }), }; const endClock = startClock(); - const dialect = preBuild.databaseConfig.kind; - if (dialect === "pglite") { - const driver = createPglite(preBuild.databaseConfig.options); - const qb = createQB(drizzlePglite(driver), { common }); - try { - await qb.wrap((db) => db.execute("SELECT version()"), context); - } catch (e) { - const error = new RetryableError( - `Failed to connect to PGlite database. Please check your database connection settings.\n\n${(e as any).message}`, - ); - error.stack = undefined; - return { status: "error", error }; - } finally { - await driver.close(); - } + try { + await database.adminQB.wrap( + (db) => db.execute("SELECT version()"), + context, + ); + } catch (_error) { + const error = new BuildError("Failed to connect to database", { + cause: _error as Error, + }); + return { status: "error", error } as const; + } + if (preBuild.databaseConfig.kind === "postgres") { + common.logger.info({ + msg: "Connected to database", + type: database.driver.dialect, + database: getDatabaseName(preBuild.databaseConfig.poolConfig), + duration: endClock(), + }); + } else if (preBuild.databaseConfig.kind === "pglite") { const pgliteDir = preBuild.databaseConfig.options.dataDir; const pglitePath = pgliteDir === "memory://" ? 
"memory://" : path.relative(common.options.rootDir, pgliteDir); - common.logger.info({ - msg: "Connected to database", - type: dialect, - database: pglitePath, - duration: endClock(), - }); - } else if (dialect === "postgres") { - const pool = createPool( - { - ...preBuild.databaseConfig.poolConfig, - application_name: "test", - max: 1, - statement_timeout: 10_000, - }, - common.logger, - ); - const qb = createQB(drizzleNodePostgres(pool), { common }); - try { - await qb.wrap((db) => db.execute("SELECT version()"), context); - } catch (e) { - const error = new RetryableError( - `Failed to connect to database. Please check your database connection settings.\n\n${(e as any).message}`, - ); - error.stack = undefined; - return { status: "error", error }; - } finally { - await pool.end(); - } common.logger.info({ msg: "Connected to database", - type: dialect, - database: getDatabaseName(preBuild.databaseConfig.poolConfig), + type: database.driver.dialect, + database: pglitePath, duration: endClock(), }); } diff --git a/packages/core/src/build/pre.ts b/packages/core/src/build/pre.ts index 1afc56f80..59310985b 100644 --- a/packages/core/src/build/pre.ts +++ b/packages/core/src/build/pre.ts @@ -42,7 +42,7 @@ export function buildPre({ if (connectionString === undefined) { if (config.database.poolConfig === undefined) { - throw new Error( + throw new BuildError( "Invalid database configuration: Either 'connectionString' or 'poolConfig' must be defined.", ); } @@ -88,27 +88,3 @@ export function buildPre({ ordering: config.ordering ?? 
"multichain", }; } - -export function safeBuildPre({ - config, - options, - logger, -}: { - config: Config; - options: Pick; - logger: Logger; -}) { - try { - const result = buildPre({ config, options, logger }); - - return { - status: "success", - databaseConfig: result.databaseConfig, - ordering: result.ordering, - } as const; - } catch (_error) { - const buildError = new BuildError((_error as Error).message); - buildError.stack = undefined; - return { status: "error", error: buildError } as const; - } -} diff --git a/packages/core/src/build/schema.ts b/packages/core/src/build/schema.ts index a3c2c740a..34b255b52 100644 --- a/packages/core/src/build/schema.ts +++ b/packages/core/src/build/schema.ts @@ -51,13 +51,13 @@ export const buildSchema = ({ name === PONDER_META_TABLE_NAME || name === PONDER_CHECKPOINT_TABLE_NAME ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' is a reserved table name.`, ); } if (name.length > MAX_DATABASE_OBJECT_NAME_LENGTH) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' table name cannot be longer than ${MAX_DATABASE_OBJECT_NAME_LENGTH} characters.`, ); } @@ -67,7 +67,7 @@ export const buildSchema = ({ for (const [columnName, column] of Object.entries(getTableColumns(s))) { if (column.primary) { if (hasPrimaryKey) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has multiple primary keys.`, ); } else { @@ -81,44 +81,44 @@ export const buildSchema = ({ column instanceof PgBigSerial53 || column instanceof PgBigSerial64 ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' has a serial column and serial columns are unsupported.`, ); } if (column.isUnique) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' has a unique constraint and unique constraints are unsupported.`, ); } if (column.generated !== undefined) { - throw new Error( + throw new 
BuildError( `Schema validation failed: '${name}.${columnName}' is a generated column and generated columns are unsupported.`, ); } if (column.generatedIdentity !== undefined) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a generated column and generated columns are unsupported.`, ); } if (column.hasDefault) { if (column.default && column.default instanceof SQL) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a default column and default columns with raw sql are unsupported.`, ); } if (column.defaultFn && column.defaultFn() instanceof SQL) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a default column and default columns with raw sql are unsupported.`, ); } if (column.onUpdateFn && column.onUpdateFn() instanceof SQL) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a default column and default columns with raw sql are unsupported.`, ); } @@ -136,13 +136,13 @@ export const buildSchema = ({ column.name === "operation" || column.name === "checkpoint" ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a reserved column name.`, ); } if (columnNames.has(column.name)) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${column.name}' column name is used multiple times.`, ); } else { @@ -152,7 +152,7 @@ export const buildSchema = ({ if (preBuild.ordering === "experimental_isolated") { if (hasChainIdColumn === false) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' does not have required 'chainId' column.`, ); } @@ -161,7 +161,7 @@ export const buildSchema = ({ getTableColumns(s).chainId!.dataType !== "number" && getTableColumns(s).chainId!.dataType !== "bigint" ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}'.chainId column 
must be an integer or numeric.`, ); } @@ -170,14 +170,14 @@ export const buildSchema = ({ getPrimaryKeyColumns(s).some(({ sql }) => sql === "chain_id") === false ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.chain_id' column is required to be in the primary key when ordering is 'isolated'.`, ); } } if (tableNames.has(getTableName(s))) { - throw new Error( + throw new BuildError( `Schema validation failed: table name '${getTableName(s)}' is used multiple times.`, ); } else { @@ -185,13 +185,13 @@ export const buildSchema = ({ } if (getTableConfig(s).primaryKeys.length > 1) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has multiple primary keys.`, ); } if (getTableConfig(s).primaryKeys.length === 1 && hasPrimaryKey) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has multiple primary keys.`, ); } @@ -200,25 +200,25 @@ export const buildSchema = ({ getTableConfig(s).primaryKeys.length === 0 && hasPrimaryKey === false ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has no primary key. Declare one with ".primaryKey()".`, ); } if (getTableConfig(s).foreignKeys.length > 0) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has a foreign key constraint and foreign key constraints are unsupported.`, ); } if (getTableConfig(s).checks.length > 0) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has a check constraint and check constraints are unsupported.`, ); } if (getTableConfig(s).uniqueConstraints.length > 0) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' has a unique constraint and unique constraints are unsupported.`, ); } @@ -227,7 +227,7 @@ export const buildSchema = ({ // Note: Ponder lets postgres handle the index name length limit and truncation. 
if (index.config.name && indexNames.has(index.config.name)) { - throw new Error( + throw new BuildError( `Schema validation failed: index name '${index.config.name}' is used multiple times.`, ); } else if (index.config.name) { @@ -237,7 +237,7 @@ export const buildSchema = ({ } if (is(s, PgSequence)) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}' is a sequence and sequences are unsupported.`, ); } @@ -246,7 +246,7 @@ export const buildSchema = ({ // Note: Ponder lets postgres handle the view name length limit and truncation. if (viewNames.has(getViewName(s))) { - throw new Error( + throw new BuildError( `Schema validation failed: view name '${getViewName(s)}' is used multiple times.`, ); } else { @@ -256,19 +256,19 @@ export const buildSchema = ({ const viewConfig = getViewConfig(s); if (viewConfig.selectedFields.length === 0) { - throw new Error( + throw new BuildError( `Schema validation failed: view '${getViewName(s)}' has no selected fields.`, ); } if (viewConfig.isExisting) { - throw new Error( + throw new BuildError( `Schema validation failed: view '${getViewName(s)}' is an existing view and existing views are unsupported.`, ); } if (viewConfig.query === undefined) { - throw new Error( + throw new BuildError( `Schema validation failed: view '${getViewName(s)}' has no underlying query.`, ); } @@ -283,7 +283,7 @@ export const buildSchema = ({ is(column, PgColumn) === false && is(column, SQL.Aliased) === false ) { - throw new Error( + throw new BuildError( `Schema validation failed: view '${getViewName(s)}.${columnName}' is a non-column selected field.`, ); } @@ -295,31 +295,31 @@ export const buildSchema = ({ column instanceof PgBigSerial53 || column instanceof PgBigSerial64 ) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' has a serial column and serial columns are unsupported.`, ); } if (column.isUnique) { - throw new Error( + throw new BuildError( `Schema validation failed: 
'${name}.${columnName}' has a unique constraint and unique constraints are unsupported.`, ); } if (column.generated !== undefined) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a generated column and generated columns are unsupported.`, ); } if (column.generatedIdentity !== undefined) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${columnName}' is a generated column and generated columns are unsupported.`, ); } if (columnNames.has((column as PgColumn).name)) { - throw new Error( + throw new BuildError( `Schema validation failed: '${name}.${(column as PgColumn).name}' column name is used multiple times.`, ); } else { @@ -331,28 +331,10 @@ export const buildSchema = ({ } if (tableNames.size > TABLE_LIMIT) { - throw new Error( + throw new BuildError( `Schema validation failed: the maximum number of tables is ${TABLE_LIMIT}.`, ); } return { statements }; }; - -export const safeBuildSchema = ({ - schema, - preBuild, -}: { schema: Schema; preBuild: Pick }) => { - try { - const result = buildSchema({ schema, preBuild }); - - return { - status: "success", - ...result, - } as const; - } catch (_error) { - const buildError = new BuildError((_error as Error).message); - buildError.stack = undefined; - return { status: "error", error: buildError } as const; - } -}; diff --git a/packages/core/src/build/stacktrace.ts b/packages/core/src/build/stacktrace.ts index 9667e7eaa..b8d43c22c 100644 --- a/packages/core/src/build/stacktrace.ts +++ b/packages/core/src/build/stacktrace.ts @@ -2,15 +2,15 @@ import { readFileSync } from "node:fs"; import { codeFrameColumns } from "@babel/code-frame"; import { parse as parseStackTrace } from "stacktrace-parser"; -class ESBuildTransformError extends Error { +export class ESBuildTransformError extends Error { override name = "ESBuildTransformError"; } -class ESBuildBuildError extends Error { +export class ESBuildBuildError extends Error { override name = 
"ESBuildBuildError"; } -class ESBuildContextError extends Error { +export class ESBuildContextError extends Error { override name = "ESBuildContextError"; } @@ -131,7 +131,7 @@ export function parseViteNodeError(file: string, error: Error): ViteNodeError { // This can throw with "Cannot set property message of [object Object] which has only a getter" try { resolvedError.message = `Error while ${verb} ${file}: ${resolvedError.message}`; - } catch (e) {} + } catch {} return resolvedError; } diff --git a/packages/core/src/database/actions.test.ts b/packages/core/src/database/actions.test.ts index 31816056c..18a09f9b3 100644 --- a/packages/core/src/database/actions.test.ts +++ b/packages/core/src/database/actions.test.ts @@ -7,7 +7,6 @@ import { import { buildSchema } from "@/build/schema.js"; import { getReorgTable } from "@/drizzle/kit/index.js"; import { onchainTable, primaryKey } from "@/drizzle/onchain.js"; -import type { RetryableError } from "@/internal/errors.js"; import type { IndexingErrorHandler } from "@/internal/types.js"; import { type Checkpoint, @@ -44,16 +43,16 @@ function createCheckpoint(checkpoint: Partial): string { } const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; test("finalize()", async () => { diff --git a/packages/core/src/database/index.test.ts b/packages/core/src/database/index.test.ts index c3512d527..fc969fade 100644 --- a/packages/core/src/database/index.test.ts +++ b/packages/core/src/database/index.test.ts @@ -7,7 +7,6 @@ import { onchainView, primaryKey, } from "@/drizzle/onchain.js"; -import type { RetryableError } from 
"@/internal/errors.js"; import { createShutdown } from "@/internal/shutdown.js"; import type { IndexingErrorHandler } from "@/internal/types.js"; import { @@ -55,16 +54,16 @@ function createCheckpoint(checkpoint: Partial): string { } const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; test("migrate() succeeds with empty schema", async () => { diff --git a/packages/core/src/database/index.ts b/packages/core/src/database/index.ts index 467f3f0d5..a031c7002 100644 --- a/packages/core/src/database/index.ts +++ b/packages/core/src/database/index.ts @@ -8,7 +8,7 @@ import { import type { Common } from "@/internal/common.js"; import { MigrationError, - NonRetryableUserError, + QueryBuilderError, ShutdownError, } from "@/internal/errors.js"; import type { @@ -48,7 +48,7 @@ import { dropLiveQueryTriggers, dropTriggers, } from "./actions.js"; -import { type QB, createQB, parseDbError } from "./queryBuilder.js"; +import { type QB, createQB, shouldRetry } from "./queryBuilder.js"; export type Database = { driver: PostgresDriver | PGliteDriver; @@ -454,7 +454,9 @@ export const createDatabase = ({ return; } catch (_error) { - const error = parseDbError(_error); + const error = new QueryBuilderError(undefined, { + cause: _error as Error, + }); if (common.shutdown.isKilled) { throw new ShutdownError(); @@ -468,20 +470,12 @@ export const createDatabase = ({ method: "migrate_sync", }); - common.logger.warn({ - msg: "Failed database query", - query: "migrate_sync", - retry_count: i, - error, - }); - - if (error instanceof NonRetryableUserError) { + if (shouldRetry(error.cause) === false) { 
common.logger.warn({ msg: "Failed database query", query: "migrate_sync", error, }); - throw error; } if (i === 9) { @@ -540,13 +534,10 @@ export const createDatabase = ({ ); } } catch (_error) { - let error = _error as Error; - if (!error.message.includes("already exists")) throw error; - error = new MigrationError( - `Unable to create table '${namespace.schema}'.'${schemaBuild.statements.tables.json[i]!.tableName}' because a table with that name already exists.`, + throw new MigrationError( + `Unable to create table '${namespace.schema}'.'${schemaBuild.statements.tables.json[i]!.tableName}'.`, + { cause: _error as Error }, ); - error.stack = undefined; - throw error; } } @@ -569,13 +560,10 @@ export const createDatabase = ({ context, ) .catch((_error) => { - const error = _error as Error; - if (!error.message.includes("already exists")) throw error; - const e = new MigrationError( - `Unable to create view "${namespace.schema}"."${schemaBuild.statements.views.json[i]!.name}" because a view with that name already exists.`, + throw new MigrationError( + `Unable to create view "${namespace.schema}"."${schemaBuild.statements.views.json[i]!.name}".`, + { cause: _error as Error }, ); - e.stack = undefined; - throw e; }); } }; @@ -588,13 +576,10 @@ export const createDatabase = ({ context, ) .catch((_error) => { - const error = _error as Error; - if (!error.message.includes("already exists")) throw error; - const e = new MigrationError( - `Unable to create enum "${namespace.schema}"."${schemaBuild.statements.enums.json[i]!.name}" because an enum with that name already exists.`, + throw new MigrationError( + `Unable to create enum "${namespace.schema}"."${schemaBuild.statements.enums.json[i]!.name}".`, + { cause: _error as Error }, ); - e.stack = undefined; - throw e; }); } }; @@ -854,11 +839,9 @@ CREATE TABLE IF NOT EXISTS "${namespace.schema}"."${PONDER_CHECKPOINT_TABLE_NAME // Note: ponder <=0.8 will evaluate this as true because the version is undefined if 
(previousApp.version !== VERSION) { - const error = new MigrationError( + throw new MigrationError( `Schema "${namespace.schema}" was previously used by a Ponder app with a different minor version. Drop the schema first, or use a different schema. Read more: https://ponder.sh/docs/database#database-schema`, ); - error.stack = undefined; - throw error; } if ( @@ -866,11 +849,9 @@ CREATE TABLE IF NOT EXISTS "${namespace.schema}"."${PONDER_CHECKPOINT_TABLE_NAME (common.options.command === "dev" || previousApp.build_id !== buildId) ) { - const error = new MigrationError( + throw new MigrationError( `Schema "${namespace.schema}" was previously used by a different Ponder app. Drop the schema first, or use a different schema. Read more: https://ponder.sh/docs/database#database-schema`, ); - error.stack = undefined; - throw error; } const expiry = @@ -986,11 +967,9 @@ CREATE TABLE IF NOT EXISTS "${namespace.schema}"."${PONDER_CHECKPOINT_TABLE_NAME result = await tryAcquireLockAndMigrate(); if (result.status === "locked") { - const error = new MigrationError( + throw new MigrationError( `Failed to acquire lock on schema "${namespace.schema}". 
A different Ponder app is actively using this schema.`, ); - error.stack = undefined; - throw error; } } diff --git a/packages/core/src/database/queryBuilder.test.ts b/packages/core/src/database/queryBuilder.test.ts index eab235d90..1f6d1d499 100644 --- a/packages/core/src/database/queryBuilder.test.ts +++ b/packages/core/src/database/queryBuilder.test.ts @@ -1,5 +1,4 @@ import { context, setupCommon, setupIsolatedDatabase } from "@/_test/setup.js"; -import { NotNullConstraintError } from "@/internal/errors.js"; import { createPool } from "@/utils/pg.js"; import { sql } from "drizzle-orm"; import { drizzle } from "drizzle-orm/node-postgres"; @@ -101,6 +100,13 @@ test("QB transaction retries error", async () => { // BEGIN, BEGIN, SELECT, ROLLBACK, BEGIN, SELECT, COMMIT expect(querySpy).toHaveBeenCalledTimes(7); + // unrecognized errors are propagated + await expect( + qb.transaction({ label: "test1" }, async () => { + throw new Error("i'm an error"); + }), + ).rejects.toThrow("i'm an error"); + connection.release(); }); @@ -118,12 +124,19 @@ test("QB parses error", async () => { const querySpy = vi.spyOn(pool, "query"); querySpy.mockRejectedValueOnce(new Error("violates not-null constraint")); - const error = await qb - .wrap({ label: "test1" }, (db) => db.select().from(SCHEMATA)) - .catch((error) => error); + await expect( + qb.wrap({ label: "test1" }, (db) => db.select().from(SCHEMATA)), + ).rejects.toThrow(); expect(querySpy).toHaveBeenCalledTimes(1); - expect(error).toBeInstanceOf(NotNullConstraintError); + + await expect( + qb.wrap({ label: "test2" }, (db) => + db.execute("SELECT * FRROM information_schema.schemata"), + ), + ).rejects.toThrow(); + + expect(querySpy).toHaveBeenCalledTimes(2); }); test("QB client", async () => { diff --git a/packages/core/src/database/queryBuilder.ts b/packages/core/src/database/queryBuilder.ts index 163e3993f..0821d6ca5 100644 --- a/packages/core/src/database/queryBuilder.ts +++ b/packages/core/src/database/queryBuilder.ts @@ 
-1,14 +1,12 @@ import crypto from "node:crypto"; import type { Common } from "@/internal/common.js"; -import { BaseError } from "@/internal/errors.js"; import { + BaseError, BigIntSerializationError, - CheckConstraintError, - DbConnectionError, - NonRetryableUserError, - NotNullConstraintError, + QueryBuilderError, ShutdownError, - UniqueConstraintError, + TransactionCallbackError, + TransactionStatementError, } from "@/internal/errors.js"; import type { Logger } from "@/internal/logger.js"; import type { Schema } from "@/internal/types.js"; @@ -87,52 +85,6 @@ export type QB< | { $dialect: "postgres"; $client: pg.Pool | pg.PoolClient } ); -export const parseDbError = (error: any): Error => { - const stack = error.stack; - - if (error instanceof BaseError) { - return error; - } - - if (error?.message?.includes("violates not-null constraint")) { - error = new NotNullConstraintError(error.message); - } else if (error?.message?.includes("violates unique constraint")) { - error = new UniqueConstraintError(error.message); - } else if (error?.message?.includes("violates check constraint")) { - error = new CheckConstraintError(error.message); - } else if ( - // nodejs error message - error?.message?.includes("Do not know how to serialize a BigInt") || - // bun error message - error?.message?.includes("cannot serialize BigInt") - ) { - error = new BigIntSerializationError(error.message); - error.meta.push( - "Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. 
Docs: https://ponder.sh/docs/api-reference/ponder-utils#replacebigints", - ); - } else if (error?.message?.includes("does not exist")) { - error = new NonRetryableUserError(error.message); - } else if (error?.message?.includes("already exists")) { - error = new NonRetryableUserError(error.message); - } else if ( - error?.message?.includes( - "terminating connection due to administrator command", - ) || - error?.message?.includes("connection to client lost") || - error?.message?.includes("too many clients already") || - error?.message?.includes("Connection terminated unexpectedly") || - error?.message?.includes("ECONNRESET") || - error?.message?.includes("ETIMEDOUT") || - error?.message?.includes("timeout exceeded when trying to connect") - ) { - error = new DbConnectionError(error.message); - } - - error.stack = stack; - - return error; -}; - /** * Create a query builder. * @@ -209,8 +161,10 @@ export const createQB = < } return result; - } catch (e) { - const error = parseDbError(e); + } catch (_error) { + const error = new QueryBuilderError(undefined, { + cause: _error as Error, + }); if (common.shutdown.isKilled) { throw new ShutdownError(); @@ -231,17 +185,14 @@ export const createQB = < firstError = error; } - // Two types of transaction environments - // 1. Inside callback (running user statements or control flow statements): Throw error, retry - // later. We want the error bubbled up out of the callback, so the transaction is properly rolled back. - // 2. Outside callback (running entire transaction, user statements + control flow statements): Retry immediately. - - if (isTransaction) { - if (error instanceof NonRetryableUserError) { - throw error; - } - } else if (isTransactionStatement) { - // Transaction statements are not immediately retried, so the transaction will be properly rolled back. + // Contexts: + // 1. Query outside of a transaction. + // 2. Query inside of a transaction. + // 3. 
Transaction query: Could be caused by a query in the transaction callback, + // a transaction control statement, or a wildcard error that is not related to + // the query builder. + + if (isTransaction === false && isTransactionStatement) { logger.warn({ msg: "Failed database query", query: label, @@ -249,8 +200,15 @@ export const createQB = < duration: endClock(), error, }); - throw error; - } else if (error instanceof NonRetryableUserError) { + // Transaction statements are not immediately retried, so the transaction + // will be properly rolled back. + throw new TransactionStatementError(undefined, { cause: error }); + } else if (error.cause instanceof TransactionCallbackError) { + // Unrelated errors are bubbled out of the query builder. + throw error.cause.cause; + } + + if (shouldRetry(error.cause) === false) { logger.warn({ msg: "Failed database query", query: label, @@ -365,8 +323,18 @@ export const createQB = < } }; - const result = await callback(tx); - return result; + try { + const result = await callback(tx); + return result; + } catch (error) { + if (error instanceof TransactionStatementError) { + throw error; + } else { + throw new TransactionCallbackError(undefined, { + cause: error as Error, + }); + } + } }, config), { isTransaction: true, @@ -447,8 +415,18 @@ export const createQB = < } }; - const result = await callback(tx); - return result; + try { + const result = await callback(tx); + return result; + } catch (error) { + if (error instanceof TransactionStatementError) { + throw error; + } else { + throw new TransactionCallbackError(undefined, { + cause: error as Error, + }); + } + } }, config), { label, @@ -478,11 +456,28 @@ export const createQB = < { logger?: Logger } | undefined, ]; - // @ts-expect-error - return retryLogMetricErrorWrap(() => callback(db), { - isTransactionStatement: true, - logger: context?.logger ?? 
common.logger, - }); + return retryLogMetricErrorWrap( + async () => { + try { + // @ts-expect-error + const result = await callback(db); + return result; + } catch (error) { + if (error instanceof TransactionStatementError) { + throw error; + } else { + throw new TransactionCallbackError(undefined, { + cause: error as Error, + }); + } + } + }, + { + isTransaction: false, + isTransactionStatement: true, + logger: context?.logger ?? common.logger, + }, + ); } else { const [{ label }, callback, context] = args as [ { label: string }, @@ -496,12 +491,29 @@ export const createQB = < { logger?: Logger } | undefined, ]; - // @ts-expect-error - return retryLogMetricErrorWrap(() => callback(db), { - label, - isTransactionStatement: true, - logger: context?.logger ?? common.logger, - }); + return retryLogMetricErrorWrap( + async () => { + try { + // @ts-expect-error + const result = await callback(db); + return result; + } catch (error) { + if (error instanceof TransactionStatementError) { + throw error; + } else { + throw new TransactionCallbackError(undefined, { + cause: error as Error, + }); + } + } + }, + { + label, + isTransaction: false, + isTransactionStatement: true, + logger: context?.logger ?? 
common.logger, + }, + ); } }; } @@ -535,3 +547,51 @@ export const createQB = < return qb; }; + +export function shouldRetry(error: Error) { + if (error?.message?.includes("violates not-null constraint")) { + return false; + } + if (error?.message?.includes("violates unique constraint")) { + return false; + } + if (error?.message?.includes("violates check constraint")) { + return false; + } + if ( + error instanceof BigIntSerializationError || + // nodejs error message + error?.message?.includes("Do not know how to serialize a BigInt") || + // bun error message + error?.message?.includes("cannot serialize BigInt") + ) { + return false; + } + if (error?.message?.includes("does not exist")) { + return false; + } + if (error?.message?.includes("already exists")) { + return false; + } + if (error?.message?.includes("syntax error")) { + return false; + } + if (error instanceof BaseError && error.cause) { + if (shouldRetry(error.cause) === false) return false; + } + + // if ( + // error?.message?.includes( + // "terminating connection due to administrator command", + // ) || + // error?.message?.includes("connection to client lost") || + // error?.message?.includes("too many clients already") || + // error?.message?.includes("Connection terminated unexpectedly") || + // error?.message?.includes("ECONNRESET") || + // error?.message?.includes("ETIMEDOUT") || + // error?.message?.includes("timeout exceeded when trying to connect") + // ) { + // } + + return true; +} diff --git a/packages/core/src/drizzle/json.ts b/packages/core/src/drizzle/json.ts index a1c61c72f..ed8c6e080 100644 --- a/packages/core/src/drizzle/json.ts +++ b/packages/core/src/drizzle/json.ts @@ -1,4 +1,4 @@ -import { type BaseError, BigIntSerializationError } from "@/internal/errors.js"; +import { BigIntSerializationError } from "@/internal/errors.js"; import { type ColumnBaseConfig, entityKind } from "drizzle-orm"; import type { ColumnBuilderBaseConfig, @@ -61,8 +61,8 @@ export class PgJson< // bun error 
message error?.message?.includes("cannot serialize BigInt") ) { - error = new BigIntSerializationError(error.message); - (error as BaseError).meta.push( + error = new BigIntSerializationError(undefined, { cause: error }); + (error as BigIntSerializationError).meta.push( "Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. Docs: https://ponder.sh/docs/api-reference/ponder-utils#replacebigints", ); } @@ -135,10 +135,15 @@ export class PgJsonb< return JSON.stringify(value); } catch (_error) { let error = _error as Error; - if (error?.message?.includes("Do not know how to serialize a BigInt")) { - error = new BigIntSerializationError(error.message); - (error as BaseError).meta.push( - "Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. Docs: https://ponder.sh/docs/api-reference/ponder-utils#replacebigints", + if ( + // node error message + error?.message?.includes("Do not know how to serialize a BigInt") || + // bun error message + error?.message?.includes("cannot serialize BigInt") + ) { + error = new BigIntSerializationError(undefined, { cause: error }); + (error as BigIntSerializationError).meta.push( + "Hint:\n The JSONB column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. 
Docs: https://ponder.sh/docs/api-reference/ponder-utils#replacebigints", ); } diff --git a/packages/core/src/index.ts b/packages/core/src/index.ts index a4efda515..3b1f11d53 100644 --- a/packages/core/src/index.ts +++ b/packages/core/src/index.ts @@ -35,6 +35,8 @@ export { primaryKey, hex, bigint, + json, + jsonb, } from "@/drizzle/onchain.js"; export type { @@ -112,8 +114,6 @@ export { inet, integer, interval, - json, - jsonb, line, macaddr, macaddr8, diff --git a/packages/core/src/indexing-store/cache.test.ts b/packages/core/src/indexing-store/cache.test.ts index 8b8a657be..777cd9d6d 100644 --- a/packages/core/src/indexing-store/cache.test.ts +++ b/packages/core/src/indexing-store/cache.test.ts @@ -11,7 +11,7 @@ import { deployErc20, mintErc20 } from "@/_test/simulate.js"; import { getErc20IndexingBuild, getSimulatedEvent } from "@/_test/utils.js"; import { onchainEnum, onchainTable } from "@/drizzle/onchain.js"; import { getEventCount } from "@/indexing/index.js"; -import type { RetryableError } from "@/internal/errors.js"; +import { DelayedInsertError } from "@/internal/errors.js"; import type { IndexingErrorHandler } from "@/internal/types.js"; import { parseEther, zeroAddress } from "viem"; import { beforeEach, expect, test } from "vitest"; @@ -24,16 +24,16 @@ beforeEach(setupCleanup); beforeEach(setupAnvil); const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; test("flush() insert", async () => { @@ -215,9 +215,7 @@ test("flush() recovers error", async () => { balance: 10n, }); - await expect(indexingCache.flush()).rejects.toThrowError( - `duplicate key value violates 
unique constraint "account_pkey"`, - ); + await expect(indexingCache.flush()).rejects.toThrow(DelayedInsertError); }); }); diff --git a/packages/core/src/indexing-store/cache.ts b/packages/core/src/indexing-store/cache.ts index bd90efd6a..5fc150ea7 100644 --- a/packages/core/src/indexing-store/cache.ts +++ b/packages/core/src/indexing-store/cache.ts @@ -5,9 +5,9 @@ import { getPartitionName } from "@/drizzle/onchain.js"; import { addErrorMeta, toErrorMeta } from "@/indexing/index.js"; import type { Common } from "@/internal/common.js"; import { - CopyFlushError, DelayedInsertError, ShutdownError, + TransactionStatementError, } from "@/internal/errors.js"; import type { CrashRecoveryCheckpoint, @@ -285,10 +285,11 @@ export const getCopyHelper = (qb: QB, chainId?: number) => { .query(`COPY ${target} FROM '/dev/blob'`, [], { blob: new Blob([text]), }) - // Note: `TransactionError` is applied because the query - // uses the low-level `$client.query` method. .catch((error) => { - throw new CopyFlushError(error.message); + throw new TransactionStatementError( + `Failed COPY operation for table "${getTableName(table)}"`, + { cause: error as Error }, + ); }); }; } else { @@ -307,7 +308,10 @@ export const getCopyHelper = (qb: QB, chainId?: number) => { copyStream.write(text); copyStream.end(); }).catch((error) => { - throw new CopyFlushError(error.message); + throw new TransactionStatementError( + `Failed COPY operation for table "${getTableName(table)}"`, + { cause: error as Error }, + ); }); }; } @@ -757,8 +761,9 @@ export const createIndexingCache = ({ ); if (result.status === "error") { - error = new DelayedInsertError(result.error.message); - error.stack = undefined; + error = new DelayedInsertError(undefined, { + cause: result.error as Error, + }); addErrorMeta( error, @@ -829,8 +834,9 @@ export const createIndexingCache = ({ ); if (result.status === "error") { - error = new DelayedInsertError(result.error.message); - error.stack = undefined; + error = new 
DelayedInsertError(undefined, { + cause: result.error as Error, + }); addErrorMeta( error, diff --git a/packages/core/src/indexing-store/index.test.ts b/packages/core/src/indexing-store/index.test.ts index f7dac125f..8f12ca057 100644 --- a/packages/core/src/indexing-store/index.test.ts +++ b/packages/core/src/indexing-store/index.test.ts @@ -10,9 +10,8 @@ import { getRejectionValue } from "@/_test/utils.js"; import { onchainEnum, onchainTable } from "@/drizzle/onchain.js"; import { BigIntSerializationError, - NonRetryableUserError, + IndexingDBError, RawSqlError, - type RetryableError, } from "@/internal/errors.js"; import type { IndexingErrorHandler } from "@/internal/types.js"; import { eq } from "drizzle-orm"; @@ -27,16 +26,16 @@ beforeEach(setupIsolatedDatabase); beforeEach(setupCleanup); const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; test("find", async () => { @@ -415,7 +414,7 @@ test("update throw error when primary key is updated", async () => { .set({ address: ALICE }) .catch((error) => error); - expect(error).toBeInstanceOf(NonRetryableUserError); + expect(error).toBeInstanceOf(IndexingDBError); // function @@ -424,7 +423,7 @@ test("update throw error when primary key is updated", async () => { .set(() => ({ address: ALICE })) .catch((error) => error); - expect(error).toBeInstanceOf(NonRetryableUserError); + expect(error).toBeInstanceOf(IndexingDBError); // update same primary key no function let row: any = await indexingStore.db @@ -566,16 +565,16 @@ test("sql", async () => { await tx.wrap((db) => db.execute("SAVEPOINT test")); - expect( - await 
getRejectionValue( - async () => - // @ts-ignore - await indexingStore.db.sql.insert(schema.account).values({ - address: "0x0000000000000000000000000000000000000001", - balance: undefined, - }), - ), - ).toBeInstanceOf(RawSqlError); + await expect( + // @ts-ignore + indexingStore.db.sql + .insert(schema.account) + .values({ + address: "0x0000000000000000000000000000000000000001", + balance: undefined, + }) + .then((res) => res), + ).rejects.toThrow(RawSqlError); // TODO(kyle) check constraint @@ -583,14 +582,12 @@ test("sql", async () => { await tx.wrap((db) => db.execute("ROLLBACK TO test")); - expect( - await getRejectionValue( - async () => - await indexingStore.db.sql - .insert(schema.account) - .values({ address: zeroAddress, balance: 10n }), - ), - ).toBeInstanceOf(RawSqlError); + await expect( + indexingStore.db.sql + .insert(schema.account) + .values({ address: zeroAddress, balance: 10n }) + .then((res) => res), + ).rejects.toThrow(RawSqlError); }); }); diff --git a/packages/core/src/indexing-store/index.ts b/packages/core/src/indexing-store/index.ts index 5e7c32a8c..eace6a7ba 100644 --- a/packages/core/src/indexing-store/index.ts +++ b/packages/core/src/indexing-store/index.ts @@ -1,17 +1,7 @@ import type { QB } from "@/database/queryBuilder.js"; import { onchain } from "@/drizzle/onchain.js"; import type { Common } from "@/internal/common.js"; -import { - DbConnectionError, - InvalidStoreAccessError, - InvalidStoreMethodError, - NonRetryableUserError, - RawSqlError, - RecordNotFoundError, - RetryableError, - UndefinedTableError, - UniqueConstraintError, -} from "@/internal/errors.js"; +import { IndexingDBError, RawSqlError } from "@/internal/errors.js"; import type { Schema } from "@/internal/types.js"; import type { IndexingErrorHandler, SchemaBuild } from "@/internal/types.js"; import type { Db } from "@/types/db.js"; @@ -53,7 +43,7 @@ export const validateUpdateSet = ( if (js in set) { // Note: Noop on the primary keys if they are identical, otherwise 
throw an error. if ((set as Row)[js] !== prev[js]) { - throw new NonRetryableUserError( + throw new IndexingDBError( `Primary key column '${js}' cannot be updated`, ); } @@ -67,13 +57,13 @@ export const checkOnchainTable = ( method: "find" | "insert" | "update" | "delete", ) => { if (table === undefined) - throw new UndefinedTableError( + throw new IndexingDBError( `Table object passed to db.${method}() is undefined`, ); if (onchain in table) return; - throw new InvalidStoreMethodError( + throw new IndexingDBError( method === "find" ? `db.find() can only be used with onchain tables, and '${getTableConfig(table).name}' is an offchain table or a view.` : `Indexing functions can only write to onchain tables, and '${getTableConfig(table).name}' is an offchain table or a view.`, @@ -88,7 +78,7 @@ export const checkTableAccess = ( ) => { if (chainId === undefined) return; if ("chainId" in key && String(key.chainId) === String(chainId)) return; - throw new InvalidStoreAccessError( + throw new IndexingDBError( "chainId" in key ? `db.${method}(${getTableConfig(table).name}) cannot access rows on different chains when ordering is 'isolated'.` : `db.${method}(${getTableConfig(table).name}) must specify 'chainId' when ordering is 'isolated'.`, @@ -121,7 +111,7 @@ export const createIndexingStore = ({ return async (...args: any[]) => { try { if (isProcessingEvents === false) { - throw new NonRetryableUserError( + throw new IndexingDBError( "A store API method (find, update, insert, delete) was called after the indexing function returned. Hint: Did you forget to await the store API method call (an unawaited promise)?", ); } @@ -129,20 +119,21 @@ export const createIndexingStore = ({ const result = await fn(...args); // @ts-expect-error typescript bug lol if (isProcessingEvents === false) { - throw new NonRetryableUserError( + throw new IndexingDBError( "A store API method (find, update, insert, delete) was called after the indexing function returned. 
Hint: Did you forget to await the store API method call (an unawaited promise)?", ); } return result; } catch (error) { if (isProcessingEvents === false) { - throw new NonRetryableUserError( + throw new IndexingDBError( "A store API method (find, update, insert, delete) was called after the indexing function returned. Hint: Did you forget to await the store API method call (an unawaited promise)?", ); } - if (error instanceof RetryableError) { - indexingErrorHandler.setRetryableError(error); + // Note: `error` must be an internal ponder error rather than a logical user error. + if (error instanceof IndexingDBError === false) { + indexingErrorHandler.setError(error as Error); } throw error; @@ -385,7 +376,7 @@ export const createIndexingStore = ({ }); if (row) { - throw new UniqueConstraintError( + throw new IndexingDBError( `Primary key conflict in table '${getTableName(table)}'`, ); } else { @@ -430,7 +421,7 @@ export const createIndexingStore = ({ }); if (row) { - throw new UniqueConstraintError( + throw new IndexingDBError( `Primary key conflict in table '${getTableName(table)}'`, ); } else { @@ -494,7 +485,7 @@ export const createIndexingStore = ({ const ponderRowUpdate = await indexingCache.get({ table, key }); if (ponderRowUpdate === null) { - const error = new RecordNotFoundError( + const error = new IndexingDBError( `No existing record found in table '${getTableName(table)}'`, ); error.meta.push(`db.update arguments:\n${prettyPrint(key)}`); @@ -604,11 +595,7 @@ export const createIndexingStore = ({ return result; }); } catch (error) { - if (error instanceof DbConnectionError) { - throw error; - } - - throw new RawSqlError((error as Error).message); + throw new RawSqlError(undefined, { cause: error as Error }); } finally { common.metrics.ponder_indexing_store_raw_sql_duration.observe( endClock(), diff --git a/packages/core/src/indexing-store/utils.ts b/packages/core/src/indexing-store/utils.ts index 6104944a5..8e6249a20 100644 --- 
a/packages/core/src/indexing-store/utils.ts +++ b/packages/core/src/indexing-store/utils.ts @@ -1,8 +1,5 @@ import { getPrimaryKeyColumns } from "@/drizzle/index.js"; -import { - BigIntSerializationError, - NotNullConstraintError, -} from "@/internal/errors.js"; +import { IndexingDBError } from "@/internal/errors.js"; import { prettyPrint } from "@/utils/print.js"; import { type Column, @@ -64,37 +61,23 @@ export const normalizeColumn = ( if (value === null) return null; if (column.mapToDriverValue === undefined) return value; - try { - if (Array.isArray(value) && column instanceof PgArray) { - return value.map((v) => { - if (column.baseColumn.columnType === "PgTimestamp") { - return v; - } - - return column.baseColumn.mapFromDriverValue( - column.baseColumn.mapToDriverValue(v), - ); - }); - } + if (Array.isArray(value) && column instanceof PgArray) { + return value.map((v) => { + if (column.baseColumn.columnType === "PgTimestamp") { + return v; + } - if (column.columnType === "PgTimestamp") { - return value; - } - - return column.mapFromDriverValue(column.mapToDriverValue(value)); - } catch (e) { - if ( - (e as Error)?.message?.includes("Do not know how to serialize a BigInt") - ) { - const error = new BigIntSerializationError((e as Error).message); - error.meta.push( - "Hint:\n The JSON column type does not support BigInt values. Use the replaceBigInts() helper function before inserting into the database. 
Docs: https://ponder.sh/docs/api-reference/ponder-utils#replacebigints", + return column.baseColumn.mapFromDriverValue( + column.baseColumn.mapToDriverValue(v), ); - throw error; - } + }); + } - throw e; + if (column.columnType === "PgTimestamp") { + return value; } + + return column.mapFromDriverValue(column.mapToDriverValue(value)); }; export const normalizeRow = ( @@ -110,7 +93,7 @@ export const normalizeRow = ( column.notNull && hasEmptyValue(column) === false ) { - const error = new NotNullConstraintError( + const error = new IndexingDBError( `Column '${getTableName( table, )}.${columnName}' violates not-null constraint.`, diff --git a/packages/core/src/indexing/client.ts b/packages/core/src/indexing/client.ts index 2873b9442..d70c8a237 100644 --- a/packages/core/src/indexing/client.ts +++ b/packages/core/src/indexing/client.ts @@ -1032,7 +1032,7 @@ export const cachedTransport = functionName: "aggregate3", result: resultsToEncode, }); - } catch (e) { + } catch { return encodeFunctionResult({ abi: multicall3Abi, functionName: "aggregate3", diff --git a/packages/core/src/indexing/index.test.ts b/packages/core/src/indexing/index.test.ts index 180fd8ea2..f63e8aa5d 100644 --- a/packages/core/src/indexing/index.test.ts +++ b/packages/core/src/indexing/index.test.ts @@ -18,10 +18,7 @@ import { onchainTable } from "@/drizzle/onchain.js"; import { createIndexingCache } from "@/indexing-store/cache.js"; import { createIndexingStore } from "@/indexing-store/index.js"; import { createCachedViemClient } from "@/indexing/client.js"; -import { - InvalidEventAccessError, - type RetryableError, -} from "@/internal/errors.js"; +import { InvalidEventAccessError } from "@/internal/errors.js"; import type { IndexingErrorHandler } from "@/internal/types.js"; import { createRpc } from "@/rpc/index.js"; import { parseEther, toHex, zeroAddress } from "viem"; @@ -47,16 +44,16 @@ const account = onchainTable("account", (p) => ({ const schema = { account }; const indexingErrorHandler: 
IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; test("createIndexing()", async () => { diff --git a/packages/core/src/indexing/index.ts b/packages/core/src/indexing/index.ts index 4a1e9fba0..d528598da 100644 --- a/packages/core/src/indexing/index.ts +++ b/packages/core/src/indexing/index.ts @@ -4,7 +4,6 @@ import type { IndexingStore } from "@/indexing-store/index.js"; import type { CachedViemClient } from "@/indexing/client.js"; import type { Common } from "@/internal/common.js"; import { - BaseError, IndexingFunctionError, InvalidEventAccessError, ShutdownError, @@ -212,13 +211,13 @@ export const createIndexing = ({ await event.setupCallback.fn(indexingFunctionArg); - // Note: Check `getRetryableError` to handle user-code catching errors + // Note: Check `getError` to handle user-code catching errors // from the indexing store. - if (indexingErrorHandler.getRetryableError()) { - const retryableError = indexingErrorHandler.getRetryableError()!; - indexingErrorHandler.clearRetryableError(); - throw retryableError; + if (indexingErrorHandler.getError()) { + const error = indexingErrorHandler.getError()!; + indexingErrorHandler.clearError(); + throw error; } common.metrics.ponder_indexing_function_duration.observe( @@ -226,21 +225,27 @@ export const createIndexing = ({ endClock(), ); } catch (_error) { - let error = _error instanceof Error ? 
_error : new Error(String(_error)); + if (common.shutdown.isKilled) { + throw new ShutdownError(); + } - // Note: Use `getRetryableError` rather than `error` to avoid + // Note: Use `getError` rather than `error` to avoid // issues with the user-code augmenting errors from the indexing store. - if (indexingErrorHandler.getRetryableError()) { - const retryableError = indexingErrorHandler.getRetryableError()!; - indexingErrorHandler.clearRetryableError(); - error = retryableError; - } - - if (common.shutdown.isKilled) { - throw new ShutdownError(); + let error: IndexingFunctionError; + if (indexingErrorHandler.getError()) { + error = new IndexingFunctionError(undefined, { + cause: indexingErrorHandler.getError()!, + }); + indexingErrorHandler.clearError(); + } else { + error = new IndexingFunctionError(undefined, { + cause: _error as Error, + }); } + // Copy the stack from the inner error. + error.stack = error.cause.stack; addStackTrace(error, common.options); addErrorMeta(error, toErrorMeta(event)); @@ -256,11 +261,7 @@ export const createIndexing = ({ common.metrics.hasError = true; - if (error instanceof BaseError === false) { - error = new IndexingFunctionError(error.message); - } - - throw error; + throw new IndexingFunctionError(undefined, { cause: error as Error }); } }; @@ -283,39 +284,45 @@ export const createIndexing = ({ await event.eventCallback.fn(indexingFunctionArg); + // Note: Check `getError` to handle user-code catching errors + // from the indexing store. + + if (indexingErrorHandler.getError()) { + const error = indexingErrorHandler.getError()!; + indexingErrorHandler.clearError(); + throw error; + } + common.metrics.ponder_indexing_function_duration.observe( metricLabels[event.eventCallback.name]!, endClock(), ); - - // Note: Check `getRetryableError` to handle user-code catching errors - // from the indexing store. 
- - if (indexingErrorHandler.getRetryableError()) { - const retryableError = indexingErrorHandler.getRetryableError()!; - indexingErrorHandler.clearRetryableError(); - throw retryableError; - } } catch (_error) { - let error = _error instanceof Error ? _error : new Error(String(_error)); + if (common.shutdown.isKilled) { + throw new ShutdownError(); + } - // Note: Use `getRetryableError` rather than `error` to avoid + // Note: Use `getError` rather than `error` to avoid // issues with the user-code augmenting errors from the indexing store. - if (indexingErrorHandler.getRetryableError()) { - const retryableError = indexingErrorHandler.getRetryableError()!; - indexingErrorHandler.clearRetryableError(); - error = retryableError; + let error: IndexingFunctionError; + if (indexingErrorHandler.getError()) { + error = new IndexingFunctionError(undefined, { + cause: indexingErrorHandler.getError()!, + }); + indexingErrorHandler.clearError(); + } else { + error = new IndexingFunctionError(undefined, { + cause: _error as Error, + }); } - if (common.shutdown.isKilled) { - throw new ShutdownError(); - } - - if (error instanceof InvalidEventAccessError) { - throw error; + if (error.cause instanceof InvalidEventAccessError) { + throw error.cause; } + // Copy the stack from the inner error. 
+ error.stack = error.cause.stack; addStackTrace(error, common.options); addErrorMeta(error, toErrorMeta(event)); @@ -332,10 +339,6 @@ export const createIndexing = ({ common.metrics.hasError = true; - if (error instanceof BaseError === false) { - error = new IndexingFunctionError(error.message); - } - throw error; } }; @@ -785,7 +788,7 @@ export const createEventProxy = < resetFilterInclude(eventName); // @ts-expect-error const error = new InvalidEventAccessError(`${type}.${prop}`); - indexingErrorHandler.setRetryableError(error); + indexingErrorHandler.setError(error); throw error; } @@ -817,7 +820,7 @@ export const createEventProxy = < resetFilterInclude(eventName); // @ts-expect-error const error = new InvalidEventAccessError(`${type}.${prop}`); - indexingErrorHandler.setRetryableError(error); + indexingErrorHandler.setError(error); throw error; } @@ -842,7 +845,7 @@ export const createEventProxy = < resetFilterInclude(eventName); // @ts-expect-error const error = new InvalidEventAccessError(`${type}.${prop}`); - indexingErrorHandler.setRetryableError(error); + indexingErrorHandler.setError(error); throw error; } diff --git a/packages/core/src/internal/errors.ts b/packages/core/src/internal/errors.ts index 1f50b8c86..708baaba8 100644 --- a/packages/core/src/internal/errors.ts +++ b/packages/core/src/internal/errors.ts @@ -1,36 +1,22 @@ +import type { getLogsRetryHelper } from "@ponder/utils"; + /** Base class for all known errors. */ -export class BaseError extends Error { +export class BaseError< + cause extends Error | undefined = undefined, +> extends Error { override name = "BaseError"; + override cause: cause; meta: string[] = []; - constructor(message?: string | undefined, { cause }: { cause?: Error } = {}) { + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { super(message, { cause }); + this.cause = cause as cause; Object.setPrototypeOf(this, BaseError.prototype); } } -/** Error caused by user code. Should not be retried. 
*/ -export class NonRetryableUserError extends BaseError { - override name = "NonRetryableUserError"; - - constructor(message?: string | undefined, { cause }: { cause?: Error } = {}) { - super(message, { cause }); - Object.setPrototypeOf(this, NonRetryableUserError.prototype); - } -} - -/** Error that may succeed if tried again. */ -export class RetryableError extends BaseError { - override name = "RetryableError"; - - constructor(message?: string | undefined, { cause }: { cause?: Error } = {}) { - super(message, { cause }); - Object.setPrototypeOf(this, RetryableError.prototype); - } -} - -export class ShutdownError extends NonRetryableUserError { +export class ShutdownError extends BaseError { override name = "ShutdownError"; constructor(message?: string | undefined) { @@ -39,190 +25,223 @@ export class ShutdownError extends NonRetryableUserError { } } -export class BuildError extends NonRetryableUserError { +export class BuildError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { override name = "BuildError"; - constructor(message?: string | undefined) { - super(message); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); Object.setPrototypeOf(this, BuildError.prototype); } } -export class MigrationError extends NonRetryableUserError { - override name = "MigrationError"; - - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, MigrationError.prototype); - } -} - -// Non-retryable database errors - -export class UniqueConstraintError extends NonRetryableUserError { - override name = "UniqueConstraintError"; - - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, UniqueConstraintError.prototype); - } -} +export class ExecuteFileError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "ExecuteFileError"; -export class NotNullConstraintError extends NonRetryableUserError 
{ - override name = "NotNullConstraintError"; - - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, NotNullConstraintError.prototype); - } -} - -export class InvalidStoreAccessError extends NonRetryableUserError { - override name = "InvalidStoreAccessError"; - - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, InvalidStoreAccessError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, ExecuteFileError.prototype); } } -export class RecordNotFoundError extends NonRetryableUserError { - override name = "RecordNotFoundError"; +export class RpcRequestError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "RpcRequestError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, RecordNotFoundError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, RpcRequestError.prototype); + } +} + +export class EthGetLogsRangeError extends BaseError< + RpcRequestError +> { + override name = "EthGetLogsRangeError"; + override cause: RpcRequestError; + isSuggestedRange: Extract< + ReturnType<typeof getLogsRetryHelper>, + { shouldRetry: true } + >["isSuggestedRange"]; + ranges: Extract< + ReturnType<typeof getLogsRetryHelper>, + { shouldRetry: true } + >["ranges"]; + + constructor( + { cause }: { cause: RpcRequestError }, + params: Extract< + ReturnType<typeof getLogsRetryHelper>, + { shouldRetry: true } + >, + ) { + super(undefined, { cause }); + this.cause = cause; + this.isSuggestedRange = params.isSuggestedRange; + this.ranges = params.ranges; + Object.setPrototypeOf(this, EthGetLogsRangeError.prototype); + } +} + +export class QueryBuilderError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "QueryBuilderError"; + + constructor(message?: string | undefined, { 
cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, QueryBuilderError.prototype); } } -export class CheckConstraintError extends NonRetryableUserError { - override name = "CheckConstraintError"; +/** + * Error caused by an individual `qb.wrap` statement inside + * of a `qb.transaction` callback. + */ +export class TransactionStatementError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "TransactionStatementError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, CheckConstraintError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, TransactionStatementError.prototype); } } -// Retryable database errors - -export class DbConnectionError extends RetryableError { - override name = "DbConnectionError"; +/** + * Error thrown from a `qb.transaction` callback not caused by a `qb.wrap` statement. 
+ */ +export class TransactionCallbackError< + cause extends Error, +> extends BaseError<cause> { + override name = "TransactionCallbackError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, DbConnectionError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, TransactionCallbackError.prototype); } } -export class TransactionStatementError extends RetryableError { - override name = "TransactionStatementError"; +export class DelayedInsertError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "DelayedInsertError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, TransactionStatementError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, DelayedInsertError.prototype); + } } -export class CopyFlushError extends RetryableError { - override name = "CopyFlushError"; +export class IndexingDBError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "IndexingDBError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, CopyFlushError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, IndexingDBError.prototype); } } -export class InvalidEventAccessError extends RetryableError { - override name = "InvalidEventAccessError"; - key: string; - - constructor(key: string, message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, InvalidEventAccessError.prototype); +export class RawSqlError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "RawSqlError"; - this.key = key; + constructor(message?: string | undefined, { 
cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, RawSqlError.prototype); } } -// Non-retryable indexing store errors - -export class InvalidStoreMethodError extends NonRetryableUserError { - override name = "InvalidStoreMethodError"; +export class BigIntSerializationError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "BigIntSerializationError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, InvalidStoreMethodError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, BigIntSerializationError.prototype); } } -export class UndefinedTableError extends NonRetryableUserError { - override name = "UndefinedTableError"; +/** + * @dev `stack` property points to the user code that caused the error. + */ +export class ServerError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "ServerError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, UndefinedTableError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, ServerError.prototype); } } -export class BigIntSerializationError extends NonRetryableUserError { - override name = "BigIntSerializationError"; +/** + * @dev `stack` property points to the user code that caused the error. 
+ */ +export class IndexingFunctionError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "IndexingFunctionError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, BigIntSerializationError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, IndexingFunctionError.prototype); } } -export class DelayedInsertError extends NonRetryableUserError { - override name = "DelayedInsertError"; +/** + * Error thrown when an `event` property is unexpectedly accessed in an indexing function. + */ +export class InvalidEventAccessError extends BaseError { + override name = "InvalidEventAccessError"; + key: string; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, DelayedInsertError.prototype); + constructor(key: string) { + super(); + this.key = key; + Object.setPrototypeOf(this, InvalidEventAccessError.prototype); } } -export class RawSqlError extends NonRetryableUserError { - override name = "RawSqlError"; +export class MigrationError< + cause extends Error | undefined = undefined, +> extends BaseError<cause> { + override name = "MigrationError"; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, RawSqlError.prototype); + constructor(message?: string | undefined, { cause }: { cause?: cause } = {}) { + super(message, { cause }); + Object.setPrototypeOf(this, MigrationError.prototype); } } -export class IndexingFunctionError extends NonRetryableUserError { - override name = "IndexingFunctionError"; +/** + * Returns true if the error is derived from a logical error in user code. + * @dev `instanceof` is not used because it doesn't work with serialized errors + * from threads. 
+ */ +export function isUserDerivedError(error: BaseError): boolean { + if (error.name === BuildError.name) return true; + if (error.name === ExecuteFileError.name) return true; + if (error.name === MigrationError.name) return true; + if (error.name === IndexingDBError.name) return true; + if (error.name === BigIntSerializationError.name) return true; + if (error.name === RawSqlError.name) return true; + if (error.name === DelayedInsertError.name) return true; + if (error.name === IndexingFunctionError.name) return true; - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, IndexingFunctionError.prototype); + if ("cause" in error && error.cause instanceof Error) { + // @ts-ignore + if (isUserDerivedError(error.cause)) return true; } + return false; } - -export class RpcProviderError extends BaseError { - override name = "RpcProviderError"; - - constructor(message?: string | undefined) { - super(message); - Object.setPrototypeOf(this, RpcProviderError.prototype); - } -} - -export const nonRetryableUserErrorNames = [ - ShutdownError, - BuildError, - MigrationError, - UniqueConstraintError, - NotNullConstraintError, - InvalidStoreAccessError, - RecordNotFoundError, - CheckConstraintError, - InvalidStoreMethodError, - UndefinedTableError, - BigIntSerializationError, - DelayedInsertError, - RawSqlError, - IndexingFunctionError, -].map((err) => err.name); diff --git a/packages/core/src/internal/logger.ts b/packages/core/src/internal/logger.ts index 691e12581..d23a69c80 100644 --- a/packages/core/src/internal/logger.ts +++ b/packages/core/src/internal/logger.ts @@ -1,7 +1,18 @@ +import { + ESBuildBuildError, + ESBuildContextError, + ESBuildTransformError, +} from "@/build/stacktrace.js"; import type { Prettify } from "@/types/utils.js"; import { formatEta } from "@/utils/format.js"; import pc from "picocolors"; import { type DestinationStream, type LevelWithSilent, pino } from "pino"; +import { + BaseError, + ExecuteFileError, + IndexingFunctionError, + 
ServerError, +} from "./errors.js"; export type LogMode = "pretty" | "json"; export type LogLevel = Prettify; @@ -70,10 +81,17 @@ export function createLogger({ options: T, printKeys?: (keyof T)[], ) { + if (options.msg === undefined) { + console.trace(); + } if (mode === "pretty" && printKeys) { // @ts-ignore options[PRINT_KEYS] = printKeys; } + if (options.error && options.error instanceof Error) { + stripErrorStack(options.error); + populateErrorMessageAndStack(options.error); + } logger.error(options); }, warn>( @@ -84,6 +102,11 @@ // @ts-ignore options[PRINT_KEYS] = printKeys; } + if (options.error && options.error instanceof Error) { + stripErrorStack(options.error); + populateErrorMessageAndStack(options.error); + } + logger.warn(options); }, info>( @@ -94,6 +117,10 @@ // @ts-ignore options[PRINT_KEYS] = printKeys; } + if (options.error && options.error instanceof Error) { + stripErrorStack(options.error); + populateErrorMessageAndStack(options.error); + } logger.info(options); }, debug>( @@ -104,6 +131,11 @@ // @ts-ignore options[PRINT_KEYS] = printKeys; } + if (options.error && options.error instanceof Error) { + stripErrorStack(options.error); + populateErrorMessageAndStack(options.error); + } + logger.debug(options); }, trace>( @@ -114,6 +146,10 @@ // @ts-ignore options[PRINT_KEYS] = printKeys; } + if (options.error && options.error instanceof Error) { + stripErrorStack(options.error); + populateErrorMessageAndStack(options.error); + } logger.trace(options); }, child: (bindings) => _createLogger(logger.child(bindings)), @@ -241,12 +277,41 @@ const format = (log: Log) => { prettyLog.push(`${log.error.name}: ${log.error.message}`); } - if (typeof log.error === "object" && "where" in log.error) { - prettyLog.push(`where: ${log.error.where as string}`); - } if (typeof log.error === "object" && "meta" in log.error) { - prettyLog.push(log.error.meta as string); + // @ts-ignore + 
prettyLog.push(log.error.meta); } } return prettyLog.join("\n"); }; + +function stripErrorStack(error: Error): void { + if (shouldPrintErrorStack(error) === false) { + error.stack = undefined; + } + if (error instanceof BaseError && error.cause) { + stripErrorStack(error.cause); + } +} + +function populateErrorMessageAndStack(error: Error): void { + if (error.message === undefined || error.message === "") { + error.message = error.name; + } + if (error.stack === undefined) { + error.stack = error.message; + } + if (error instanceof BaseError && error.cause) { + populateErrorMessageAndStack(error.cause); + } +} + +function shouldPrintErrorStack(error: Error): boolean { + if (error instanceof ServerError) return true; + if (error instanceof IndexingFunctionError) return true; + if (error instanceof ExecuteFileError) return true; + if (error instanceof ESBuildTransformError) return true; + if (error instanceof ESBuildBuildError) return true; + if (error instanceof ESBuildContextError) return true; + return false; +} diff --git a/packages/core/src/internal/types.ts b/packages/core/src/internal/types.ts index acea95bbb..43af06a88 100644 --- a/packages/core/src/internal/types.ts +++ b/packages/core/src/internal/types.ts @@ -29,7 +29,6 @@ import type { Chain as ViemChain, Log as ViemLog, } from "viem"; -import type { RetryableError } from "./errors.js"; // Database @@ -388,11 +387,14 @@ export type Status = { // Indexing error handler +/** + * Object to prevent user code from swallowing internal ponder errors. 
+ */ export type IndexingErrorHandler = { - getRetryableError: () => RetryableError | undefined; - setRetryableError: (error: RetryableError) => void; - clearRetryableError: () => void; - error: RetryableError | undefined; + getError: () => Error | undefined; + setError: (error: Error) => void; + clearError: () => void; + error: Error | undefined; }; // Seconds diff --git a/packages/core/src/rpc/actions.ts b/packages/core/src/rpc/actions.ts index 32ba7a8ea..df17e3c40 100644 --- a/packages/core/src/rpc/actions.ts +++ b/packages/core/src/rpc/actions.ts @@ -1,4 +1,4 @@ -import { RpcProviderError } from "@/internal/errors.js"; +import { RpcRequestError } from "@/internal/errors.js"; import type { LightBlock, SyncBlock, @@ -320,26 +320,24 @@ export const validateTransactionsAndBlock = ( ) => { for (const [index, transaction] of block.transactions.entries()) { if (block.hash !== transaction.blockHash) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The transaction at index ${index} of the 'block.transactions' array has a 'transaction.blockHash' of ${transaction.blockHash}, but the block itself has a 'block.hash' of ${block.hash}.`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (block.number !== transaction.blockNumber) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The transaction at index ${index} of the 'block.transactions' array has a 'transaction.blockNumber' of ${transaction.blockNumber} (${hexToNumber(transaction.blockNumber)}), but the block itself has a 'block.number' of ${block.number} (${hexToNumber(block.number)}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } } @@ -361,7 +359,7 @@ export const validateLogsAndBlock = ( >, ) => { if (block.logsBloom !== zeroLogsBloom && logs.length === 0) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The logs array has length 0, but the associated block has a non-empty 'block.logsBloom'.`, ); error.meta = [ @@ -369,7 +367,6 @@ export const validateLogsAndBlock = ( requestText(blockRequest), requestText(logsRequest), ]; - error.stack = undefined; throw error; } @@ -382,7 +379,7 @@ export const validateLogsAndBlock = ( for (const log of logs) { if (block.hash !== log.blockHash) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The log with 'logIndex' ${log.logIndex} (${hexToNumber(log.logIndex)}) has a 'log.blockHash' of ${log.blockHash}, but the associated block has a 'block.hash' of ${block.hash}.`, ); error.meta = [ @@ -390,12 +387,11 @@ export const validateLogsAndBlock = ( requestText(blockRequest), requestText(logsRequest), ]; - error.stack = undefined; throw error; } if (block.number !== log.blockNumber) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The log with 'logIndex' ${log.logIndex} (${hexToNumber(log.logIndex)}) has a 'log.blockNumber' of ${log.blockNumber} (${hexToNumber(log.blockNumber)}), but the associated block has a 'block.number' of ${block.number} (${hexToNumber(block.number)}).`, ); error.meta = [ @@ -403,14 +399,13 @@ export const validateLogsAndBlock = ( requestText(blockRequest), requestText(logsRequest), ]; - error.stack = undefined; throw error; } if (log.transactionHash !== zeroHash) { const transaction = transactionByIndex.get(log.transactionIndex); if (transaction === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The log with 'logIndex' ${log.logIndex} (${hexToNumber(log.logIndex)}) has a 'log.transactionIndex' of ${log.transactionIndex} (${hexToNumber(log.transactionIndex)}), but the associated 'block.transactions' array does not contain a transaction matching that 'transactionIndex'.`, ); error.meta = [ @@ -418,10 +413,9 @@ export const validateLogsAndBlock = ( requestText(blockRequest), requestText(logsRequest), ]; - error.stack = undefined; throw error; } else if (transaction.hash !== log.transactionHash) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The log with 'logIndex' ${log.logIndex} (${hexToNumber(log.logIndex)}) matches a transaction in the associated 'block.transactions' array by 'transactionIndex' ${log.transactionIndex} (${hexToNumber(log.transactionIndex)}), but the log has a 'log.transactionHash' of ${log.transactionHash} while the transaction has a 'transaction.hash' of ${transaction.hash}.`, ); error.meta = [ @@ -429,7 +423,6 @@ export const validateLogsAndBlock = ( requestText(blockRequest), requestText(logsRequest), ]; - error.stack = undefined; throw error; } } @@ -454,7 +447,7 @@ export const validateTracesAndBlock = ( const transactionHashes = new Set(block.transactions.map((t) => t.hash)); for (const [index, trace] of traces.entries()) { if (transactionHashes.has(trace.transactionHash) === false) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The top-level trace at array index ${index} has a 'transactionHash' of ${trace.transactionHash}, but the associated 'block.transactions' array does not contain a transaction matching that hash.`, ); error.meta = [ @@ -462,14 +455,13 @@ export const validateTracesAndBlock = ( requestText(blockRequest), requestText(tracesRequest), ]; - error.stack = undefined; throw error; } } // Use the fact that any transaction produces a trace to validate. if (block.transactions.length !== 0 && traces.length === 0) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The traces array has length 0, but the associated 'block.transactions' array has length ${block.transactions.length}.`, ); error.meta = [ @@ -477,7 +469,6 @@ export const validateTracesAndBlock = ( requestText(blockRequest), requestText(tracesRequest), ]; - error.stack = undefined; throw error; } }; @@ -506,7 +497,7 @@ export const validateReceiptsAndBlock = ( for (const [index, receipt] of receipts.entries()) { if (block.hash !== receipt.blockHash) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The receipt at array index ${index} has a 'receipt.blockHash' of ${receipt.blockHash}, but the associated block has a 'block.hash' of ${block.hash}.`, ); error.meta = [ @@ -514,12 +505,11 @@ export const validateReceiptsAndBlock = ( requestText(blockRequest), requestText(receiptsRequest), ]; - error.stack = undefined; throw error; } if (block.number !== receipt.blockNumber) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The receipt at array index ${index} has a 'receipt.blockNumber' of ${receipt.blockNumber} (${hexToNumber(receipt.blockNumber)}), but the associated block has a 'block.number' of ${block.number} (${hexToNumber(block.number)}).`, ); error.meta = [ @@ -527,13 +517,12 @@ export const validateReceiptsAndBlock = ( requestText(blockRequest), requestText(receiptsRequest), ]; - error.stack = undefined; throw error; } const transaction = transactionByIndex.get(receipt.transactionIndex); if (transaction === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The receipt at array index ${index} has a 'receipt.transactionIndex' of ${receipt.transactionIndex} (${hexToNumber(receipt.transactionIndex)}), but the associated 'block.transactions' array does not contain a transaction matching that 'transactionIndex'.`, ); error.meta = [ @@ -541,10 +530,9 @@ export const validateReceiptsAndBlock = ( requestText(blockRequest), requestText(receiptsRequest), ]; - error.stack = undefined; throw error; } else if (transaction.hash !== receipt.transactionHash) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The receipt at array index ${index} matches a transaction in the associated 'block.transactions' array by 'transactionIndex' ${receipt.transactionIndex} (${hexToNumber(receipt.transactionIndex)}), but the receipt has a 'receipt.transactionHash' of ${receipt.transactionHash} while the transaction has a 'transaction.hash' of ${transaction.hash}.`, ); error.meta = [ @@ -552,7 +540,6 @@ export const validateReceiptsAndBlock = ( requestText(blockRequest), requestText(receiptsRequest), ]; - error.stack = undefined; throw error; } } @@ -561,7 +548,7 @@ export const validateReceiptsAndBlock = ( receiptsRequest.method === "eth_getBlockReceipts" && block.transactions.length !== receipts.length ) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The receipts array has length ${receipts.length}, but the associated 'block.transactions' array has length ${block.transactions.length}.`, ); error.meta = [ @@ -569,7 +556,6 @@ export const validateReceiptsAndBlock = ( requestText(blockRequest), requestText(receiptsRequest), ]; - error.stack = undefined; throw error; } }; @@ -617,58 +603,53 @@ export const standardizeBlock = < ): block extends SyncBlock ? 
SyncBlock : SyncBlockHeader => { // required properties if (block.hash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'block.hash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (block.number === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'block.number' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (block.timestamp === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'block.timestamp' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (block.logsBloom === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'block.logsBloom' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (block.parentHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'block.parentHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } @@ -714,25 +695,23 @@ export const standardizeBlock = < } if (hexToBigInt(block.number) > PG_BIGINT_MAX) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'block.number' (${hexToBigInt(block.number)}) is larger than the maximum allowed value (${PG_BIGINT_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", 
requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(block.timestamp) > PG_BIGINT_MAX) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'block.timestamp' (${hexToBigInt(block.timestamp)}) is larger than the maximum allowed value (${PG_BIGINT_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } @@ -791,14 +770,13 @@ export const standardizeTransactions = ( for (const transaction of transactions) { if (transactionIds.has(transaction.transactionIndex)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The 'block.transactions' array contains two objects with a 'transactionIndex' of ${transaction.transactionIndex} (${hexToNumber(transaction.transactionIndex)}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } else { transactionIds.add(transaction.transactionIndex); @@ -806,58 +784,53 @@ export const standardizeTransactions = ( // required properties if (transaction.hash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'transaction.hash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (transaction.transactionIndex === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'transaction.transactionIndex' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (transaction.blockNumber === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'transaction.blockNumber' is 
a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (transaction.blockHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'transaction.blockHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (transaction.from === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'transaction.from' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } @@ -894,36 +867,33 @@ export const standardizeTransactions = ( } if (hexToBigInt(transaction.blockNumber) > PG_BIGINT_MAX) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'transaction.blockNumber' (${hexToBigInt(transaction.blockNumber)}) is larger than the maximum allowed value (${PG_BIGINT_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(transaction.transactionIndex) > BigInt(PG_INTEGER_MAX)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'transaction.transactionIndex' (${hexToBigInt(transaction.transactionIndex)}) is larger than the maximum allowed value (${PG_INTEGER_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(transaction.nonce) > BigInt(PG_INTEGER_MAX)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'transaction.nonce' (${hexToBigInt(transaction.nonce)}) is larger than the maximum allowed value 
(${PG_INTEGER_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } } @@ -953,7 +923,7 @@ export const standardizeLogs = ( const logIds = new Set(); for (const log of logs) { if (logIds.has(`${log.blockNumber}_${log.logIndex}`)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. The logs array contains two objects with 'blockNumber' ${log.blockNumber} (${hexToNumber(log.blockNumber)}) and 'logIndex' ${log.logIndex} (${hexToNumber(log.logIndex)}).`, ); error.meta = [ @@ -961,8 +931,6 @@ export const standardizeLogs = ( requestText(request), ]; - console.log(logs); - error.stack = undefined; throw error; } else { logIds.add(`${log.blockNumber}_${log.logIndex}`); @@ -970,69 +938,63 @@ export const standardizeLogs = ( // required properties if (log.blockNumber === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.blockNumber' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.logIndex === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.logIndex' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.blockHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.blockHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.address === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.address' is a required property", ); 
error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.topics === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.topics' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.data === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'log.data' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (log.transactionHash === undefined) { @@ -1048,36 +1010,33 @@ export const standardizeLogs = ( } if (hexToBigInt(log.blockNumber) > PG_BIGINT_MAX) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'log.blockNumber' (${hexToBigInt(log.blockNumber)}) is larger than the maximum allowed value (${PG_BIGINT_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(log.transactionIndex) > BigInt(PG_INTEGER_MAX)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'log.transactionIndex' (${hexToBigInt(log.transactionIndex)}) is larger than the maximum allowed value (${PG_INTEGER_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(log.logIndex) > BigInt(PG_INTEGER_MAX)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'log.logIndex' (${hexToBigInt(log.logIndex)}) is larger than the maximum allowed value (${PG_INTEGER_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", 
requestText(request), ]; - error.stack = undefined; throw error; } } @@ -1107,47 +1066,43 @@ export const standardizeTrace = ( ): SyncTrace => { // required properties if (trace.transactionHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'trace.transactionHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (trace.trace.type === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'trace.type' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (trace.trace.from === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'trace.from' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (trace.trace.input === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'trace.input' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } @@ -1196,14 +1151,13 @@ export const standardizeTransactionReceipts = ( const receiptIds = new Set(); for (const receipt of receipts) { if (receiptIds.has(receipt.transactionHash)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Inconsistent RPC response data. 
The receipts array contains two objects with a 'transactionHash' of ${receipt.transactionHash}.`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } else { receiptIds.add(receipt.transactionHash); @@ -1211,69 +1165,63 @@ export const standardizeTransactionReceipts = ( // required properties if (receipt.blockHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.blockHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (receipt.blockNumber === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.blockNumber' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (receipt.transactionHash === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.transactionHash' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (receipt.transactionIndex === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.transactionIndex' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (receipt.from === undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.from' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (receipt.status === 
undefined) { - const error = new RpcProviderError( + const error = new RpcRequestError( "Invalid RPC response: 'receipt.status' is a required property", ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } @@ -1307,25 +1255,23 @@ export const standardizeTransactionReceipts = ( } if (hexToBigInt(receipt.blockNumber) > PG_BIGINT_MAX) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'receipt.blockNumber' (${hexToBigInt(receipt.blockNumber)}) is larger than the maximum allowed value (${PG_BIGINT_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } if (hexToBigInt(receipt.transactionIndex) > BigInt(PG_INTEGER_MAX)) { - const error = new RpcProviderError( + const error = new RpcRequestError( `Invalid RPC response: 'receipt.transactionIndex' (${hexToBigInt(receipt.transactionIndex)}) is larger than the maximum allowed value (${PG_INTEGER_MAX}).`, ); error.meta = [ "Please report this error to the RPC operator.", requestText(request), ]; - error.stack = undefined; throw error; } } diff --git a/packages/core/src/rpc/index.ts b/packages/core/src/rpc/index.ts index 561d3b564..43f3d5fed 100644 --- a/packages/core/src/rpc/index.ts +++ b/packages/core/src/rpc/index.ts @@ -1,6 +1,11 @@ import crypto, { type UUID } from "node:crypto"; import url from "node:url"; import type { Common } from "@/internal/common.js"; +import { + EthGetLogsRangeError, + RpcRequestError, + ShutdownError, +} from "@/internal/errors.js"; import type { Logger } from "@/internal/logger.js"; import type { Chain, SyncBlock, SyncBlockHeader } from "@/internal/types.js"; import { eth_getBlockByNumber, standardizeBlock } from "@/rpc/actions.js"; @@ -22,7 +27,6 @@ import { MethodNotSupportedRpcError, ParseRpcError, type PublicRpcSchema, - type RpcError, type RpcTransactionReceipt, TimeoutError, 
custom, @@ -542,8 +546,14 @@ export const createRpc = ({ bucket.reactivationDelay = INITIAL_REACTIVATION_DELAY; return response as RequestReturnType; - } catch (e) { - const error = e as Error; + } catch (_error) { + const error = new RpcRequestError(undefined, { + cause: _error as Error, + }); + + if (common.shutdown.isKilled) { + throw new ShutdownError(); + } common.metrics.ponder_rpc_request_error_total.inc( { method: body.method, chain: chain.name }, @@ -558,7 +568,7 @@ export const createRpc = ({ ) { const getLogsErrorResponse = getLogsRetryHelper({ params: body.params as GetLogsRetryHelperParameters["params"], - error: error as RpcError, + error, }); if (getLogsErrorResponse.shouldRetry) { @@ -571,12 +581,14 @@ export const createRpc = ({ method: body.method, request: JSON.stringify(body), retry_ranges: JSON.stringify(getLogsErrorResponse.ranges), - error: error as Error, + error, }); - throw error; + throw new EthGetLogsRangeError( + { cause: error }, + getLogsErrorResponse, + ); } else { - // @ts-ignore error.meta = [ "Tip: Use the ethGetLogsBlockRange option to override the default behavior for this chain", ]; @@ -585,52 +597,44 @@ export const createRpc = ({ addLatency(bucket, endClock(), false); - if ( - // @ts-ignore - error.code === 429 || - // @ts-ignore - error.status === 429 || - error instanceof TimeoutError - ) { - if (bucket.isActive) { - bucket.isActive = false; - bucket.isWarmingUp = false; + if (shouldRateLimit(error.cause) && bucket.isActive) { + bucket.isActive = false; + bucket.isWarmingUp = false; - bucket.rpsLimit = Math.max( - bucket.rpsLimit * RPS_DECREASE_FACTOR, - MIN_RPS, - ); - bucket.consecutiveSuccessfulRequests = 0; + bucket.rpsLimit = Math.max( + bucket.rpsLimit * RPS_DECREASE_FACTOR, + MIN_RPS, + ); + bucket.consecutiveSuccessfulRequests = 0; - common.logger.debug({ - msg: "JSON-RPC provider rate limited", + common.logger.debug({ + msg: "JSON-RPC provider rate limited", + chain: chain.name, + chain_id: chain.id, + hostname: 
bucket.hostname, + rps_limit: Math.floor(bucket.rpsLimit), + }); + + scheduleBucketActivation(bucket); + + if (buckets.every((b) => b.isActive === false)) { + logger.warn({ + msg: "All JSON-RPC providers are inactive", chain: chain.name, chain_id: chain.id, - hostname: bucket.hostname, - rps_limit: Math.floor(bucket.rpsLimit), }); - - scheduleBucketActivation(bucket); - - if (buckets.every((b) => b.isActive === false)) { - logger.warn({ - msg: "All JSON-RPC providers are inactive", - chain: chain.name, - chain_id: chain.id, - }); - } - - bucket.reactivationDelay = - error instanceof TimeoutError - ? INITIAL_REACTIVATION_DELAY - : Math.min( - bucket.reactivationDelay * BACKOFF_FACTOR, - MAX_REACTIVATION_DELAY, - ); } + + bucket.reactivationDelay = + error.cause instanceof TimeoutError + ? INITIAL_REACTIVATION_DELAY + : Math.min( + bucket.reactivationDelay * BACKOFF_FACTOR, + MAX_REACTIVATION_DELAY, + ); } - if (shouldRetry(error) === false) { + if (shouldRetry(error.cause) === false) { logger.warn({ msg: "Received JSON-RPC error", chain: chain.name, @@ -957,3 +961,12 @@ function shouldRetry(error: Error) { } return true; } + +function shouldRateLimit(error: Error) { + // @ts-ignore + if (error.code === 429) return true; + // @ts-ignore + if (error.status === 429) return true; + if (error instanceof TimeoutError) return true; + return false; +} diff --git a/packages/core/src/runtime/historical.ts b/packages/core/src/runtime/historical.ts index 01b628825..39a8e8cdd 100644 --- a/packages/core/src/runtime/historical.ts +++ b/packages/core/src/runtime/historical.ts @@ -1,6 +1,6 @@ import type { Database } from "@/database/index.js"; import type { Common } from "@/internal/common.js"; -import { ShutdownError } from "@/internal/errors.js"; +// import { ShutdownError } from "@/internal/errors.js"; import type { Chain, CrashRecoveryCheckpoint, @@ -1286,8 +1286,8 @@ export async function* getLocalSyncGenerator(params: { params.common.options.command === "dev" ? 
10_000 : 50_000, ); - closestToTipBlock = await params.database.syncQB - .transaction(async (tx) => { + closestToTipBlock = await params.database.syncQB.transaction( + async (tx) => { const syncStore = createSyncStore({ common: params.common, qb: tx }); const logs = await historicalSync.syncBlockRangeData({ interval, @@ -1328,22 +1328,8 @@ export async function* getLocalSyncGenerator(params: { } return closestToTipBlock; - }) - .catch((error) => { - if (error instanceof ShutdownError) { - throw error; - } - - params.common.logger.warn({ - msg: "Failed to fetch backfill JSON-RPC data", - chain: params.chain.name, - chain_id: params.chain.id, - block_range: JSON.stringify(interval), - duration: endClock(), - error, - }); - throw error; - }); + }, + ); clearTimeout(durationTimer); diff --git a/packages/core/src/runtime/isolated.ts b/packages/core/src/runtime/isolated.ts index 0b16b94f4..5d9a5ce7f 100644 --- a/packages/core/src/runtime/isolated.ts +++ b/packages/core/src/runtime/isolated.ts @@ -18,11 +18,7 @@ import { getEventCount, } from "@/indexing/index.js"; import type { Common } from "@/internal/common.js"; -import { - InvalidEventAccessError, - NonRetryableUserError, - type RetryableError, -} from "@/internal/errors.js"; +import { InvalidEventAccessError } from "@/internal/errors.js"; import type { CrashRecoveryCheckpoint, IndexingBuild, @@ -93,16 +89,16 @@ export async function runIsolated({ }); const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; const indexingCache = createIndexingCache({ @@ -405,15 +401,16 @@ export async function runIsolated({ 
indexingCache.invalidate(); indexingCache.clear(); + common.logger.warn({ + msg: "Failed to index block range", + chain: chain.name, + chain_id: chain.id, + block_range: JSON.stringify(blockRange), + duration: indexStartClock(), + error: error as Error, + }); + if (error instanceof InvalidEventAccessError) { - common.logger.debug({ - msg: "Failed to index block range", - chain: chain.name, - chain_id: chain.id, - block_range: JSON.stringify(blockRange), - duration: indexStartClock(), - error, - }); events = await refetchHistoricalEvents({ common, indexingBuild, @@ -421,15 +418,6 @@ export async function runIsolated({ syncStore, events, }); - } else if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to index block range", - chain: chain.name, - chain_id: chain.id, - block_range: JSON.stringify(blockRange), - duration: indexStartClock(), - error: error as Error, - }); } throw error; @@ -623,15 +611,13 @@ export async function runIsolated({ } catch (error) { indexingCache.clear(); - if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to index block", - chain: chain.name, - chain_id: chain.id, - number: Number(decodeCheckpoint(checkpoint).blockNumber), - error: error, - }); - } + common.logger.warn({ + msg: "Failed to index block", + chain: chain.name, + chain_id: chain.id, + number: Number(decodeCheckpoint(checkpoint).blockNumber), + error: error, + }); throw error; } diff --git a/packages/core/src/runtime/multichain.ts b/packages/core/src/runtime/multichain.ts index 37dbba9fc..abb9f1ed8 100644 --- a/packages/core/src/runtime/multichain.ts +++ b/packages/core/src/runtime/multichain.ts @@ -24,11 +24,7 @@ import { getEventCount, } from "@/indexing/index.js"; import type { Common } from "@/internal/common.js"; -import { - InvalidEventAccessError, - NonRetryableUserError, - type RetryableError, -} from "@/internal/errors.js"; +import { InvalidEventAccessError } from 
"@/internal/errors.js"; import { getAppProgress } from "@/internal/metrics.js"; import type { Chain, @@ -105,16 +101,16 @@ export async function runMultichain({ }); const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; const indexingCache = createIndexingCache({ @@ -460,15 +456,16 @@ export async function runMultichain({ indexingCache.invalidate(); indexingCache.clear(); + common.logger.warn({ + msg: "Failed to index block range", + chain: chain.name, + chain_id: chain.id, + block_range: JSON.stringify(blockRange), + duration: indexStartClock(), + error: error as Error, + }); + if (error instanceof InvalidEventAccessError) { - common.logger.debug({ - msg: "Failed to index block range", - chain: chain.name, - chain_id: chain.id, - block_range: JSON.stringify(blockRange), - duration: indexStartClock(), - error, - }); events = await refetchHistoricalEvents({ common, indexingBuild, @@ -476,15 +473,6 @@ export async function runMultichain({ syncStore, events, }); - } else if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to index block range", - chain: chain.name, - chain_id: chain.id, - block_range: JSON.stringify(blockRange), - duration: indexStartClock(), - error: error as Error, - }); } throw error; @@ -710,15 +698,13 @@ export async function runMultichain({ } catch (error) { indexingCache.clear(); - if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to index block", - chain: chain.name, - chain_id: chain.id, - number: Number(decodeCheckpoint(checkpoint).blockNumber), - error: error, - }); - 
} + common.logger.warn({ + msg: "Failed to index block", + chain: chain.name, + chain_id: chain.id, + number: Number(decodeCheckpoint(checkpoint).blockNumber), + error: error, + }); throw error; } diff --git a/packages/core/src/runtime/omnichain.ts b/packages/core/src/runtime/omnichain.ts index 093946d73..d35fb08ee 100644 --- a/packages/core/src/runtime/omnichain.ts +++ b/packages/core/src/runtime/omnichain.ts @@ -24,11 +24,7 @@ import { getEventCount, } from "@/indexing/index.js"; import type { Common } from "@/internal/common.js"; -import { - InvalidEventAccessError, - NonRetryableUserError, - type RetryableError, -} from "@/internal/errors.js"; +import { InvalidEventAccessError } from "@/internal/errors.js"; import { getAppProgress } from "@/internal/metrics.js"; import type { Chain, @@ -108,16 +104,16 @@ export async function runOmnichain({ }); const indexingErrorHandler: IndexingErrorHandler = { - getRetryableError: () => { + getError: () => { return indexingErrorHandler.error; }, - setRetryableError: (error: RetryableError) => { + setError: (error: Error) => { indexingErrorHandler.error = error; }, - clearRetryableError: () => { + clearError: () => { indexingErrorHandler.error = undefined; }, - error: undefined as RetryableError | undefined, + error: undefined as Error | undefined, }; const indexingCache = createIndexingCache({ @@ -479,12 +475,13 @@ export async function runOmnichain({ indexingCache.invalidate(); indexingCache.clear(); + common.logger.warn({ + msg: "Failed to index block range", + duration: indexStartClock(), + error: error as Error, + }); + if (error instanceof InvalidEventAccessError) { - common.logger.debug({ - msg: "Failed to index block range", - duration: indexStartClock(), - error, - }); events = await refetchHistoricalEvents({ common, indexingBuild, @@ -492,12 +489,6 @@ export async function runOmnichain({ syncStore, events, }); - } else if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to 
index block range", - duration: indexStartClock(), - error: error as Error, - }); } throw error; @@ -728,15 +719,13 @@ export async function runOmnichain({ } catch (error) { indexingCache.clear(); - if (error instanceof NonRetryableUserError === false) { - common.logger.warn({ - msg: "Failed to index block", - chain: chain.name, - chain_id: chain.id, - number: Number(decodeCheckpoint(checkpoint).blockNumber), - error: error, - }); - } + common.logger.warn({ + msg: "Failed to index block", + chain: chain.name, + chain_id: chain.id, + number: Number(decodeCheckpoint(checkpoint).blockNumber), + error: error, + }); throw error; } diff --git a/packages/core/src/server/error.ts b/packages/core/src/server/error.ts index 2b19caf4c..7a4848b91 100644 --- a/packages/core/src/server/error.ts +++ b/packages/core/src/server/error.ts @@ -1,12 +1,12 @@ import { addStackTrace } from "@/indexing/addStackTrace.js"; import type { Common } from "@/internal/common.js"; -import type { BaseError } from "@/internal/errors.js"; +import { ServerError } from "@/internal/errors.js"; import { prettyPrint } from "@/utils/print.js"; import type { Context, HonoRequest } from "hono"; import { html } from "hono/html"; export const onError = async (_error: Error, c: Context, common: Common) => { - const error = _error as BaseError; + const error = new ServerError(undefined, { cause: _error }); // Find the filename where the error occurred const regex = /(\S+\.(?:js|ts|mjs|cjs)):\d+:\d+/; diff --git a/packages/core/src/sync-historical/index.ts b/packages/core/src/sync-historical/index.ts index 2f4a334c0..0e1b9e145 100644 --- a/packages/core/src/sync-historical/index.ts +++ b/packages/core/src/sync-historical/index.ts @@ -1,4 +1,5 @@ import type { Common } from "@/internal/common.js"; +import { EthGetLogsRangeError } from "@/internal/errors.js"; import type { BlockFilter, Chain, @@ -54,13 +55,11 @@ import { import { promiseAllSettledWithThrow } from "@/utils/promiseAllSettledWithThrow.js"; import { 
createQueue } from "@/utils/queue.js"; import { startClock } from "@/utils/timer.js"; -import { getLogsRetryHelper } from "@ponder/utils"; import { type Address, type Hash, type Hex, type LogTopic, - type RpcError, hexToNumber, numberToHex, toHex, @@ -211,42 +210,29 @@ export const createHistoricalSync = ( throw error; } - const getLogsErrorResponse = getLogsRetryHelper({ - params: [ - { - address, - topics, - fromBlock: toHex(interval[0]), - toBlock: toHex(interval[1]), - }, - ], - error: error as RpcError, - }); - - if (getLogsErrorResponse.shouldRetry === false) throw error; + if (error instanceof EthGetLogsRangeError) { + const range = + hexToNumber(error.ranges[0]!.toBlock) - + hexToNumber(error.ranges[0]!.fromBlock); - const range = - hexToNumber(getLogsErrorResponse.ranges[0]!.toBlock) - - hexToNumber(getLogsErrorResponse.ranges[0]!.fromBlock); - - args.common.logger.debug({ - msg: "Updated eth_getLogs range", - chain: args.chain.name, - chain_id: args.chain.id, - range, - }); + args.common.logger.debug({ + msg: "Updated eth_getLogs range", + chain: args.chain.name, + chain_id: args.chain.id, + range, + }); - logsRequestMetadata = { - estimatedRange: range, - confirmedRange: getLogsErrorResponse.isSuggestedRange - ? range - : undefined, - }; + logsRequestMetadata = { + estimatedRange: range, + confirmedRange: error.isSuggestedRange ? 
range : undefined, + }; - return syncLogsDynamic( - { address, topic0, topic1, topic2, topic3, interval }, - context, - ); + return syncLogsDynamic( + { address, topic0, topic1, topic2, topic3, interval }, + context, + ); + } + throw error; }), ), ), diff --git a/packages/utils/src/getLogsRetryHelper.ts b/packages/utils/src/getLogsRetryHelper.ts index 28091608c..67dc48b93 100644 --- a/packages/utils/src/getLogsRetryHelper.ts +++ b/packages/utils/src/getLogsRetryHelper.ts @@ -2,13 +2,12 @@ import { type Address, type Hex, type LogTopic, - type RpcError, hexToBigInt, numberToHex, } from "viem"; export type GetLogsRetryHelperParameters = { - error: RpcError; + error: Error; params: [ { address?: Address | Address[]; @@ -109,6 +108,7 @@ export const getLogsRetryHelper = ({ // ankr match = sError.match("block range is too wide"); + // @ts-ignore if (match !== null && error.code === -32600) { const ranges = chunk({ params, range: 3000n });