diff --git a/config/cspell-ts.json b/config/cspell-ts.json index 8b9e2b047fe..6051a2dda36 100644 --- a/config/cspell-ts.json +++ b/config/cspell-ts.json @@ -24,6 +24,8 @@ } ], "words": [ + "logtape", + "logtapelogger", "bytelist", "bytestring", "binarytree", diff --git a/package-lock.json b/package-lock.json index f95aab3c626..53c47704cf8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -2526,6 +2526,24 @@ "win32" ] }, + "node_modules/@logtape/file": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@logtape/file/-/file-0.9.0.tgz", + "integrity": "sha512-ipZAyEbAEggOej2QPj2oF4h95gzPzIamQWZyBMuzZV0h+fthEDZrgp3UdZ4Cdvl1rzuLoU5nRx7h4iiJVFltQw==", + "funding": [ + "https://github.com/sponsors/dahlia" + ], + "license": "MIT" + }, + "node_modules/@logtape/logtape": { + "version": "0.9.0", + "resolved": "https://registry.npmjs.org/@logtape/logtape/-/logtape-0.9.0.tgz", + "integrity": "sha512-e4IlinGvjzp/+nSvsXB1OPSYNiuVEEJy8aMQqbveTcJoLVRsJK7nH0xVh/EdNTjRBoioJbUT/jzxaAifxf1VyA==", + "funding": [ + "https://github.com/sponsors/dahlia" + ], + "license": "MIT" + }, "node_modules/@msgpackr-extract/msgpackr-extract-darwin-arm64": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@msgpackr-extract/msgpackr-extract-darwin-arm64/-/msgpackr-extract-darwin-arm64-3.0.3.tgz", @@ -17465,6 +17483,8 @@ "@ethereumjs/util": "10.0.0-rc.1", "@ethereumjs/vm": "10.0.0-rc.1", "@js-sdsl/ordered-map": "^4.4.2", + "@logtape/file": "^0.9.0", + "@logtape/logtape": "^0.9.0", "@multiformats/multiaddr": "^12.4.0", "@paulmillr/trusted-setups": "^0.1.2", "@polkadot/wasm-crypto": "^7.4.1", diff --git a/packages/client/bin/cli.ts b/packages/client/bin/cli.ts index de9fcd761cc..d90fec3339a 100755 --- a/packages/client/bin/cli.ts +++ b/packages/client/bin/cli.ts @@ -20,14 +20,66 @@ import type * as http from 'http' import type { Block, BlockBytes } from '@ethereumjs/block' import type { ConsensusDict } from '@ethereumjs/blockchain' import type { GenesisState } from '@ethereumjs/util' +import { getFileSink } from '@logtape/file' +import { + ansiColorFormatter, + configure, + getConsoleSink, + getLogger as getLogtapeLogger, +} from '@logtape/logtape' +import type { Logger as LogtapeLoggerType } from '@logtape/logtape' import type { AbstractLevel } from 'abstract-level' import type { Server as RPCServer } from 'jayson/promise/index.js' import type { Config } from '../src/config.ts' -import type { Logger } from '../src/logging.ts' import type { FullEthereumService } from '../src/service/index.ts' -import type { ClientOpts } from '../src/types.ts' +import type { ClientOpts, Logger } from '../src/types.ts' import type { RPCArgs } from './startRPC.ts' +export class LogtapeLogger implements Logger { + public logger: LogtapeLoggerType + + constructor(logger: LogtapeLoggerType) { + this.logger = logger + + // Bind methods for logger instance + this.info = this.info.bind(this) + this.warn = this.warn.bind(this) + this.error = this.error.bind(this) + this.debug = this.debug.bind(this) + } + + info(message: string, ...meta: any[]) { + this.logger?.info(`${message}`, ...meta) + } + + warn(message: string, ...meta: any[]) { + this.logger?.warn(`${message}`, ...meta) + } + + error(message: string, ...meta: any[]) { + this.logger?.error(`${message}`, ...meta) + } + + debug(message: string, ...meta: any[]) { + this.logger?.debug(`${message}`, ...meta) + } + + isInfoEnabled() { + const level = (this.logger as any).lowestLevel + return level === 'info' || level === 'debug' + } + + configure(_: { [key: string]: any 
}) {
+    console.warn(
+      'Dynamic configuration is not supported in LogtapeLogger. Please configure globally.',
+    )
+  }
+
+  getLevel() {
+    return (this.logger as any).lowestLevel
+  }
+}
+
let logger: Logger

const args: ClientOpts = getArgs()
@@ -118,7 +170,7 @@ async function startBlock(client: EthereumClient) {
  }
  try {
    await client.chain.resetCanonicalHead(startBlock)
-    client.config.logger.info(`Chain height reset to ${client.chain.headers.height}`)
+    client.config.logger?.info(`Chain height reset to ${client.chain.headers.height}`)
  } catch (err: any) {
    throw EthereumJSErrorWithoutCode(`Error setting back chain in startBlock: ${err}`)
  }
@@ -150,7 +202,7 @@ async function startExecutionFrom(client: EthereumClient) {
    try {
      await client.chain.blockchain.setIteratorHead('vm', startExecutionParent.hash())
      await client.chain.update(false)
-      client.config.logger.info(
+      client.config.logger?.info(
        `vmHead set to ${client.chain.headers.height} for starting stateless execution at hardfork=${startExecutionHardfork}`,
      )
    } catch (err: any) {
@@ -162,7 +214,7 @@
    try {
      await client.chain.blockchain.setIteratorHead('vm', startExecutionParent.hash())
      await client.chain.update(false)
-      client.config.logger.info(
+      client.config.logger?.info(
        `vmHead set to ${client.chain.headers.height} for starting stateful execution at hardfork=${startExecutionHardfork}`,
      )
    } catch (err: any) {
@@ -186,7 +238,8 @@ async function startClient(
  config: Config,
  genesisMeta: { genesisState?: GenesisState; genesisStateRoot?: Uint8Array } = {},
) {
-  config.logger.info(`Data directory: ${config.datadir}`)
+  // TODO make sure all logger usage is preserved through the newly created logger interface
+  config.logger?.info(`Data directory: ${config.datadir}`)

  const dbs = initDBs(config)

@@ -243,13 +296,13 @@ async function startClient(
        })
        blocks.push(block)
        buf = RLP.decode(buf.remainder, true)
-        config.logger.info(
+        config.logger?.info(
          `Preloading block hash=${short(bytesToHex(block.header.hash()))} number=${
            block.header.number
          }`,
        )
      } catch (err: any) {
-        config.logger.info(
+        config.logger?.info(
          `Encountered error while while preloading chain data error=${err.message}`,
        )
        break
@@ -310,28 +363,28 @@ const stopClient = async (
    servers: (RPCServer | http.Server)[]
  } | null>,
) => {
-  config.logger.info('Caught interrupt signal. Obtaining client handle for clean shutdown...')
-  config.logger.info('(This might take a little longer if client not yet fully started)')
+  config.logger?.info('Caught interrupt signal. Obtaining client handle for clean shutdown...')
+  config.logger?.info('(This might take a little longer if client not yet fully started)')
  let timeoutHandle
  if (clientStartPromise?.toString().includes('Promise') === true)
    // Client hasn't finished starting up so setting timeout to terminate process if not already shutdown gracefully
    timeoutHandle = setTimeout(() => {
-      config.logger.warn('Client has become unresponsive while starting up.')
-      config.logger.warn('Check logging output for potential errors. Exiting...')
+      config.logger?.warn('Client has become unresponsive while starting up.')
+      config.logger?.warn('Check logging output for potential errors. 
Exiting...') process.exit(1) }, 30000) const clientHandle = await clientStartPromise if (clientHandle !== null) { - config.logger.info('Shutting down the client and the servers...') + config.logger?.info('Shutting down the client and the servers...') const { client, servers } = clientHandle for (const s of servers) { //@ts-expect-error jayson.Server type doesn't play well with ESM for some reason s['http'] !== undefined ? (s as RPCServer).http().close() : (s as http.Server).close() } await client.stop() - config.logger.info('Exiting.') + config.logger?.info('Exiting.') } else { - config.logger.info('Client did not start properly, exiting ...') + config.logger?.info('Client did not start properly, exiting ...') } clearTimeout(timeoutHandle) process.exit() @@ -346,10 +399,27 @@ async function run() { return helpRPC() } + // use Logtape logger with cli + const sinks: { [key: string]: any } = { + console: getConsoleSink({ formatter: ansiColorFormatter }), + } + if (typeof args.logFile === 'string') { + sinks.file = getFileSink(args.logFile) + } + await configure({ + sinks, + loggers: [ + { + category: 'ethjs', + lowestLevel: args.logLevel as any, + // Use all configured sink names. + sinks: Object.keys(sinks), + }, + ], + }) + logger = new LogtapeLogger(getLogtapeLogger(['ethjs', 'client'])) const { config, customGenesisState, customGenesisStateRoot, metricsServer } = - await generateClientConfig(args) - - logger = config.logger + await generateClientConfig({ ...args, logger }) // Do not wait for client to be fully started so that we can hookup SIGINT handling // else a SIGINT before may kill the process in unclean manner @@ -366,14 +436,14 @@ async function run() { client.config.chainCommon.gteHardfork(Hardfork.Paris) && (args.rpcEngine === false || args.rpcEngine === undefined) ) { - config.logger.warn(`Engine RPC endpoint not activated on a post-Merge HF setup.`) + config.logger?.warn(`Engine RPC endpoint not activated on a post-Merge HF setup.`) } if (metricsServer !== undefined) servers.push(metricsServer) config.superMsg('Client started successfully') return { client, servers } }) .catch((e) => { - config.logger.error('Error starting client', e) + config.logger?.error('Error starting client', e) return null }) @@ -388,8 +458,8 @@ async function run() { process.on('uncaughtException', (err) => { // Handles uncaught exceptions that are thrown in async events/functions and aren't caught in // main client process - config.logger.error(`Uncaught error: ${err.message}`) - config.logger.error(err) + config.logger?.error(`Uncaught error: ${err.message}`) + config.logger?.error(err.stack ?? 
err.message) void stopClient(config, clientStartPromise) }) diff --git a/packages/client/bin/repl.ts b/packages/client/bin/repl.ts index 8a8f8f32044..637fd6a8492 100644 --- a/packages/client/bin/repl.ts +++ b/packages/client/bin/repl.ts @@ -6,11 +6,179 @@ import { createInlineClient } from '../src/util/index.ts' import { startRPCServers } from './startRPC.ts' import { generateClientConfig, getArgs } from './utils.ts' +import chalk from 'chalk' +import * as winston from 'winston' +import DailyRotateFile from 'winston-daily-rotate-file' + import type { Common } from '@ethereumjs/common' import type { GenesisState } from '@ethereumjs/util' +import type { Logger as WinstonLoggerType } from 'winston' import type { Config } from '../src/config.ts' import type { EthereumClient } from '../src/index.ts' -import type { ClientOpts } from '../src/types.ts' +import type { ClientOpts, Logger } from '../src/types.ts' + +const { createLogger, format, transports: wTransports } = winston + +export type LoggerArgs = { logFile: string; logLevelFile: 'error' | 'warn' | 'info' | 'debug' } & { + logRotate?: boolean + logMaxFiles?: number +} + +const { combine, timestamp, label, printf } = format + +/** + * Attention API + * + * If set string will be displayed on all log messages + */ +let attentionHF: string | null = null +let attentionCL: string | null = null + +const LevelColors = { + error: 'red', + warn: 'yellow', + info: 'green', + debug: 'white', +} as const + +/** + * Adds stack trace to error message if included + */ +const errorFormat = format((info: any) => { + if (info.message instanceof Error && info.message.stack !== undefined) { + return { ...info, message: info.message.stack } + } + if (info instanceof Error && info.stack !== undefined) { + return { ...info, message: info.stack } + } + return info +}) + +/** + * Returns the formatted log output optionally with colors enabled + * + * Optional info parameters: + * `attentionCL`: pass in string to `info.attentionCL` to set and permanently + * display and `null` to deactivate + * `attentionHF`: pass in string to `info.attentionHF` to set and permanently + * display and `null` to deactivate + * + */ +function logFormat(colors = false) { + return printf( + (info: { + level: string + message: unknown + [key: string]: unknown + }) => { + let level = info.level.toUpperCase() + + if (info.message === undefined) info.message = '(empty message)' + + if (colors) { + const color = chalk[LevelColors[info.level as keyof typeof LevelColors]] + level = color(level) + + const regex = /(\w+)=(.+?)(?:\s|$)/g + const replaceFn = (_: any, tag: string, char: string) => `${color(tag)}=${char} ` + info.message = (info.message as string).replace(regex, replaceFn) + if (typeof info.attentionCL === 'string') + info.attentionCL = info.attentionCL.replace(regex, replaceFn) + if (typeof info.attentionHF === 'string') + info.attentionHF = info.attentionHF.replace(regex, replaceFn) + } + + if (info.attentionCL !== undefined) attentionCL = info.attentionCL as string + if (info.attentionHF !== undefined) attentionHF = info.attentionHF as string + const CLLog = attentionCL !== null ? `[ ${attentionCL} ] ` : '' + const HFLog = attentionHF !== null ? 
`[ ${attentionHF} ] ` : '' + + const msg = `[${info.timestamp}] ${level} ${CLLog}${HFLog}${info.message}` + return msg + }, + ) +} + +/** + * Returns the complete logger format + */ +export function formatConfig(colors = false) { + return combine( + errorFormat(), + format.splat(), + label({ label: 'ethereumjs' }), + timestamp({ format: 'MM-DD|HH:mm:ss' }), + logFormat(colors), + ) +} + +/** + * Returns a transport with log file saving (rotates if args.logRotate is true) + */ +export function logFileTransport(args: LoggerArgs) { + let filename = args.logFile + const opts = { + level: args.logLevelFile, + format: formatConfig(), + } + if (args.logRotate !== true) { + return new wTransports.File({ + ...opts, + filename, + }) + } else { + // Insert %DATE% before the last period + const lastPeriod = filename.lastIndexOf('.') + filename = `${filename.substring(0, lastPeriod)}.%DATE%${filename.substring(lastPeriod)}` + const logger = new DailyRotateFile({ + ...opts, + filename, + maxFiles: args.logMaxFiles, + }) + return logger + } +} + +export class WinstonLogger implements Logger { + public logger + + constructor(logger: WinstonLoggerType) { + this.logger = logger + + // Bind methods for logger instance + this.info = this.info.bind(this) + this.warn = this.warn.bind(this) + this.error = this.error.bind(this) + this.debug = this.debug.bind(this) + } + info(message: string, ...meta: any[]) { + this.logger?.info(`${message}`, ...meta) + } + + warn(message: string, ...meta: any[]) { + this.logger?.warn(`${message}`, ...meta) + } + + error(message: string, ...meta: any[]) { + this.logger?.error(`${message}`, ...meta) + } + + debug(message: string, ...meta: any[]) { + this.logger?.debug(`${message}`, ...meta) + } + + isInfoEnabled() { + return this.logger?.isInfoEnabled() + } + + configure(args: { [key: string]: any }) { + this.logger?.configure(args) + } + + getLevel() { + return this.logger?.level + } +} const setupClient = async ( config: Config, @@ -79,13 +247,20 @@ const activateRPCMethods = async (replServer: repl.REPLServer, allRPCMethods: an }) } + // TODO this assumes hardcoded winston logger is being changed, so check if not winston, don't allow logLevel as a command replServer.defineCommand('logLevel', { help: `Sets the log level. 
Example usage: .logLevel info`,
    action(params) {
+      const logger = (replServer.context.client as EthereumClient).config.logger
+      if (logger === undefined || !(logger instanceof WinstonLogger)) {
+        console.log('logLevel is only supported when using the Winston logger.')
+        this.displayPrompt()
+        return
+      }
      const level = params
      if (['debug', 'info', 'warn', 'error'].includes(level)) {
-        for (const transport of (replServer.context.client as EthereumClient).config.logger
-          .transports) {
+        // TODO: type this out better so we don't have to cast
+        for (const transport of logger.logger.transports) {
          transport.level = level
        }
      } else {
@@ -97,7 +272,7 @@ const activateRPCMethods = async (replServer: repl.REPLServer, allRPCMethods: an
  })
}

-const setupRepl = async (args: ClientOpts) => {
+const setupRepl = async (args: ClientOpts & { logger: Logger | undefined }) => {
  const { config, customGenesisState, common } = await generateClientConfig(args)
  const { client, executionRPC, engineRPC } = await setupClient(
    config,
@@ -127,4 +302,22 @@ const setupRepl = async (args: ClientOpts) => {
  // TODO define more commands similar to geth admin package to allow basic tasks like knowing when the client is fully synced
}

-await setupRepl(getArgs())
+const args = getArgs()
+const transports: any[] = [
+  new wTransports.Console({
+    level: args.logLevel,
+    silent: args.logLevel === 'off',
+    format: formatConfig(true),
+  }),
+]
+if (typeof args.logFile === 'string') {
+  transports.push(logFileTransport(args as LoggerArgs))
+}
+
+// Logger instantiation is moved up the chain into this file, since it contains the `winston` import
+const logger = createLogger({
+  transports,
+  format: formatConfig(),
+  level: args.logLevel,
+})
+await setupRepl({ ...args, logger: new WinstonLogger(logger) })
diff --git a/packages/client/bin/startRPC.ts b/packages/client/bin/startRPC.ts
index 5ec7d91ab81..8e6e05f587a 100644
--- a/packages/client/bin/startRPC.ts
+++ b/packages/client/bin/startRPC.ts
@@ -68,9 +68,9 @@ function parseJwtSecret(config: Config, jwtFilePath?: string): Uint8Array {
    jwtSecret = randomBytes(32)
    writeFileSync(defaultJwtPath, bytesToUnprefixedHex(jwtSecret), {})
-    config.logger.info(`New Engine API JWT token created path=${defaultJwtPath}`)
+    config.logger?.info(`New Engine API JWT token created path=${defaultJwtPath}`)
  }
-  config.logger.info(`Using Engine API with JWT token authentication path=${usedJwtPath}`)
+  config.logger?.info(`Using Engine API with JWT token authentication path=${usedJwtPath}`)
  return jwtSecret
}

@@ -138,12 +138,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) {
          : undefined,
      })
      rpcHttpServer.listen(rpcPort, rpcAddr)
-      logger.info(
+      logger?.info(
        `Started JSON RPC Server address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces}${
          withEngineMethods ? ' rpcEngineAuth=' + rpcEngineAuth.toString() : ''
        }`,
      )
-      logger.debug(
+      logger?.debug(
        `Methods available at address=http://${rpcAddr}:${rpcPort} namespaces=${namespaces} methods=${Object.keys(
          methods,
        ).join(',')}`,
@@ -162,12 +162,12 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) {
      const rpcWsServer = createWsRPCServerListener(opts)
      if (rpcWsServer) rpcWsServer.listen(wsPort)
-      logger.info(
+      logger?.info(
        `Started JSON RPC Server address=ws://${wsAddr}:${wsPort} namespaces=${namespaces}${
          withEngineMethods ? 
` rpcEngineAuth=${rpcEngineAuth}` : '' }`, ) - logger.debug( + logger?.debug( `Methods available at address=ws://${wsAddr}:${wsPort} namespaces=${namespaces} methods=${Object.keys( methods, ).join(',')}`, @@ -193,10 +193,10 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { : undefined, }) rpcHttpServer.listen(rpcEnginePort, rpcEngineAddr) - logger.info( + logger?.info( `Started JSON RPC server address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) - logger.debug( + logger?.debug( `Methods available at address=http://${rpcEngineAddr}:${rpcEnginePort} namespaces=${namespaces} methods=${Object.keys( methods, ).join(',')}`, @@ -216,10 +216,10 @@ export function startRPCServers(client: EthereumClient, args: RPCArgs) { const rpcWsServer = createWsRPCServerListener(opts) if (rpcWsServer) rpcWsServer.listen(wsEnginePort, wsEngineAddr) - logger.info( + logger?.info( `Started JSON RPC Server address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} rpcEngineAuth=${rpcEngineAuth}`, ) - logger.debug( + logger?.debug( `Methods available at address=ws://${wsEngineAddr}:${wsEnginePort} namespaces=${namespaces} methods=${Object.keys( methods, ).join(',')}`, diff --git a/packages/client/bin/utils.ts b/packages/client/bin/utils.ts index 6ed907ac35b..fd1a9703feb 100644 --- a/packages/client/bin/utils.ts +++ b/packages/client/bin/utils.ts @@ -46,17 +46,14 @@ import * as promClient from 'prom-client' import * as yargs from 'yargs' import { hideBin } from 'yargs/helpers' +import type { CustomCrypto } from '@ethereumjs/common' +import type { Address, GenesisState, PrefixedHexString } from '@ethereumjs/util' import { Config, SyncMode } from '../src/config.ts' -import { getLogger } from '../src/logging.ts' import { Event } from '../src/types.ts' +import type { ClientOpts, Logger } from '../src/types.ts' import { parseMultiaddrs } from '../src/util/index.ts' import { setupMetrics } from '../src/util/metrics.ts' -import type { CustomCrypto } from '@ethereumjs/common' -import type { Address, GenesisState, PrefixedHexString } from '@ethereumjs/util' -import type { Logger } from '../src/logging.ts' -import type { ClientOpts } from '../src/types.ts' - export type Account = [address: Address, privateKey: Uint8Array] const networks = Object.keys(Chain).map((network) => network.toLowerCase()) @@ -626,7 +623,7 @@ function generateAccount(): Account { return [address, privKey] } -export async function generateClientConfig(args: ClientOpts) { +export async function generateClientConfig(args: ClientOpts & { logger?: Logger | undefined }) { // Give chainId priority over networkId // Give networkId precedence over network name const chainName = args.chainId ?? args.networkId ?? args.network ?? Chain.Mainnet @@ -755,7 +752,8 @@ export async function generateClientConfig(args: ClientOpts) { args.logFile = args.logFile ? 
`${networkDir}/ethereumjs.log` : undefined
  }

-  const logger: Logger = getLogger(args)
+  // The logger is now created by the caller (CLI or REPL entry point) and passed in via args
+  const logger: Logger | undefined = args.logger
  let bootnodes
  if (args.bootnodes !== undefined) {
    // File path passed, read bootnodes from disk
@@ -775,7 +773,7 @@ export async function generateClientConfig(args: ClientOpts) {
        }
      })
      bootnodes = parseMultiaddrs(nodeURLs)
-      logger.info(`Reading bootnodes file=${args.bootnodes[0]} num=${nodeURLs.length}`)
+      logger?.info(`Reading bootnodes file=${args.bootnodes[0]} num=${nodeURLs.length}`)
    } else {
      bootnodes = parseMultiaddrs(args.bootnodes)
    }
@@ -822,7 +820,7 @@ export async function generateClientConfig(args: ClientOpts) {
      }
    })
    // Start the HTTP server which exposes the metrics on http://localhost:${args.prometheusPort}/metrics
-    logger.info(`Starting Metrics Server on port ${args.prometheusPort}`)
+    logger?.info(`Starting Metrics Server on port ${args.prometheusPort}`)
    metricsServer.listen(args.prometheusPort)
  }

@@ -882,12 +880,12 @@ export async function generateClientConfig(args: ClientOpts) {
      writeFileSync(`${networkDir}/${details.transport}`, details.url)
    } catch (e) {
      // In case dir is not really setup, mostly to take care of mockserver in test
-      config.logger.error(`Error writing listener details to disk: ${(e as Error).message}`)
+      config.logger?.error(`Error writing listener details to disk: ${(e as Error).message}`)
    }
  })
  if (customGenesisState !== undefined) {
    const numAccounts = Object.keys(customGenesisState).length
-    config.logger.info(`Reading custom genesis state accounts=${numAccounts}`)
+    config.logger?.info(`Reading custom genesis state accounts=${numAccounts}`)
  }

  const customGenesisStateRoot = args.verkleGenesisStateRoot
diff --git a/packages/client/package.json b/packages/client/package.json
index f56d2854974..6ee0300211d 100644
--- a/packages/client/package.json
+++ b/packages/client/package.json
@@ -66,6 +66,8 @@
    "@ethereumjs/util": "10.0.0-rc.1",
    "@ethereumjs/vm": "10.0.0-rc.1",
    "@js-sdsl/ordered-map": "^4.4.2",
+    "@logtape/file": "^0.9.0",
+    "@logtape/logtape": "^0.9.0",
    "@multiformats/multiaddr": "^12.4.0",
    "@paulmillr/trusted-setups": "^0.1.2",
    "@polkadot/wasm-crypto": "^7.4.1",
diff --git a/packages/client/src/client.ts b/packages/client/src/client.ts
index 4ceba524982..7e8c298ed0e 100644
--- a/packages/client/src/client.ts
+++ b/packages/client/src/client.ts
@@ -110,15 +110,15 @@ export class EthereumClient {
    const name = this.config.chainCommon.chainName()
    const chainId = this.config.chainCommon.chainId()
    const packageJSON = getPackageJSON()
-    this.config.logger.info(
+    this.config.logger?.info(
      `Initializing Ethereumjs client version=v${packageJSON.version} network=${name} chainId=${chainId}`,
    )

    this.config.events.on(Event.SERVER_ERROR, (error) => {
-      this.config.logger.warn(`Server error: ${error.name} - ${error.message}`)
+      this.config.logger?.warn(`Server error: ${error.name} - ${error.message}`)
    })
    this.config.events.on(Event.SERVER_LISTENING, (details) => {
-      this.config.logger.info(
+      this.config.logger?.info(
        `Server listener up transport=${details.transport} url=${details.url}`,
      )
    })
@@ -135,7 +135,7 @@ export class EthereumClient {
    if (this.started) {
      return false
    }

-    this.config.logger.info('Setup networking and services.')
+    this.config.logger?.info('Setup networking and services.')
    await this.service.start()
    this.config.server && (await this.config.server.start())
diff --git a/packages/client/src/config.ts b/packages/client/src/config.ts
index 1a0223c15fb..7a5a8012d47 100644
--- a/packages/client/src/config.ts +++ b/packages/client/src/config.ts @@ -4,7 +4,6 @@ import { type Address, BIGINT_0, BIGINT_1, BIGINT_2, BIGINT_256 } from '@ethereu import { EventEmitter } from 'eventemitter3' import { Level } from 'level' -import { getLogger } from './logging.ts' import { RlpxServer } from './net/server/index.ts' import { Event } from './types.ts' import { isBrowser, short } from './util/index.ts' @@ -12,8 +11,7 @@ import { isBrowser, short } from './util/index.ts' import type { BlockHeader } from '@ethereumjs/block' import type { VM, VMProfilerOpts } from '@ethereumjs/vm' import type { Multiaddr } from '@multiformats/multiaddr' -import type { Logger } from './logging.ts' -import type { EventParams, MultiaddrLike, PrometheusMetrics } from './types.ts' +import type { EventParams, Logger, MultiaddrLike, PrometheusMetrics } from './types.ts' export type DataDirectory = (typeof DataDirectory)[keyof typeof DataDirectory] @@ -398,7 +396,7 @@ export class Config { // support blobs and proofs cache for CL getBlobs for upto 1 epoch of data public static readonly BLOBS_AND_PROOFS_CACHE_BLOCKS = 32 - public readonly logger: Logger + public readonly logger: Logger | undefined public readonly syncmode: SyncMode public readonly vm?: VM public readonly datadir: string @@ -566,9 +564,9 @@ export class Config { this.discDns = this.getDnsDiscovery(options.discDns) this.discV4 = options.discV4 ?? true - this.logger = options.logger ?? getLogger({ logLevel: 'error' }) + this.logger = options.logger - this.logger.info(`Sync Mode ${this.syncmode}`) + this.logger?.info(`Sync Mode ${this.syncmode}`) if (this.syncmode !== SyncMode.None) { if (options.server !== undefined) { this.server = options.server @@ -628,7 +626,7 @@ export class Config { const diff = Date.now() - this.lastSyncDate if (diff >= this.syncedStateRemovalPeriod) { this.synchronized = false - this.logger.info( + this.logger?.info( `Sync status reset (no chain updates for ${Math.round(diff / 1000)} seconds).`, ) } @@ -636,7 +634,7 @@ export class Config { } if (this.synchronized !== this.lastSynchronized) { - this.logger.debug( + this.logger?.debug( `Client synchronized=${this.synchronized}${ latest !== null && latest !== undefined ? ' height=' + latest.number : '' } syncTargetHeight=${this.syncTargetHeight} lastSyncDate=${ @@ -712,11 +710,11 @@ export class Config { for (const msg of msgs) { len = msg.length > len ? 
msg.length : len } - this.logger.info('-'.repeat(len), meta) + this.logger?.info('-'.repeat(len), meta) for (const msg of msgs) { - this.logger.info(msg, meta) + this.logger?.info(msg, meta) } - this.logger.info('-'.repeat(len), meta) + this.logger?.info('-'.repeat(len), meta) } /** diff --git a/packages/client/src/execution/execution.ts b/packages/client/src/execution/execution.ts index 6aa2ae6af7b..17d2123edb4 100644 --- a/packages/client/src/execution/execution.ts +++ b/packages/client/src/execution/execution.ts @@ -49,7 +49,7 @@ export abstract class Execution { */ async open(): Promise { this.started = true - this.config.logger.info('Setup EVM execution.') + this.config.logger?.info('Setup EVM execution.') } /** @@ -57,7 +57,7 @@ export abstract class Execution { */ async stop(): Promise { this.started = false - this.config.logger.info('Stopped execution.') + this.config.logger?.info('Stopped execution.') return true } } diff --git a/packages/client/src/execution/vmexecution.ts b/packages/client/src/execution/vmexecution.ts index 1deb4051dc9..05f39cfb4af 100644 --- a/packages/client/src/execution/vmexecution.ts +++ b/packages/client/src/execution/vmexecution.ts @@ -159,11 +159,11 @@ export class VMExecution extends Execution { valueEncoding: this.config.useStringValueTrieDB ? ValueEncoding.String : ValueEncoding.Bytes, }) - this.config.logger.info(`Setting up merkleVM`) - this.config.logger.info(`Initializing account cache size=${this.config.accountCache}`) - this.config.logger.info(`Initializing storage cache size=${this.config.storageCache}`) - this.config.logger.info(`Initializing code cache size=${this.config.codeCache}`) - this.config.logger.info(`Initializing trie cache size=${this.config.trieCache}`) + this.config.logger?.info(`Setting up merkleVM`) + this.config.logger?.info(`Initializing account cache size=${this.config.accountCache}`) + this.config.logger?.info(`Initializing storage cache size=${this.config.storageCache}`) + this.config.logger?.info(`Initializing code cache size=${this.config.codeCache}`) + this.config.logger?.info(`Initializing trie cache size=${this.config.trieCache}`) const stateManager = new MerkleStateManager({ trie, @@ -206,12 +206,12 @@ export class VMExecution extends Execution { return } if (this.config.statelessVerkle) { - this.config.logger.info(`Setting up verkleVM for stateless verkle execution`) + this.config.logger?.info(`Setting up verkleVM for stateless verkle execution`) stateManager = new StatelessVerkleStateManager({ common: this.config.execCommon, }) } else if (this.config.statefulVerkle) { - this.config.logger.info(`Setting up verkleVM for stateful verkle execution`) + this.config.logger?.info(`Setting up verkleVM for stateful verkle execution`) stateManager = new StatefulVerkleStateManager({ common: this.config.execCommon }) } else throw EthereumJSErrorWithoutCode('EIP-6800 active and no verkle execution mode specified') @@ -300,7 +300,7 @@ export class VMExecution extends Execution { await this.setupVerkleVM() this.vm = this.verkleVM! 
} else { - this.config.logger.info( + this.config.logger?.info( `Initializing VM merkle statemanager genesis hardfork=${this.hardfork}`, ) await this.setupMerkleVM() @@ -587,7 +587,7 @@ export class VMExecution extends Execution { async jumpVmHead(jumpToHash: Uint8Array, jumpToNumber?: bigint): Promise { return this.runWithLock(async () => { // check if the block is canonical in chain - this.config.logger.warn( + this.config.logger?.warn( `Setting execution head to hash=${short(jumpToHash)} number=${jumpToNumber}`, ) await this.chain.blockchain.setIteratorHead('vm', jumpToHash) @@ -613,7 +613,7 @@ export class VMExecution extends Execution { await this.checkAndReset(startHeadBlock) let canonicalHead = await this.chain.blockchain.getCanonicalHeadBlock() - this.config.logger.debug( + this.config.logger?.debug( `Running execution startHeadBlock=${startHeadBlock?.header.number} canonicalHead=${canonicalHead?.header.number} loop=${loop}`, ) @@ -653,7 +653,7 @@ export class VMExecution extends Execution { if (reorg) { clearCache = true - this.config.logger.info( + this.config.logger?.info( `VM run: Chain reorged, setting new head to block number=${headBlock.header.number} clearCache=${clearCache}.`, ) } else { @@ -736,7 +736,7 @@ export class VMExecution extends Execution { } hash=${bytesToHex(block.hash())} txs=${block.transactions.length} gasUsed=${ result.gasUsed } time=${diffSec}secs` - this.config.logger.warn(msg) + this.config.logger?.warn(msg) } await this.receiptsManager?.saveReceipts(block, result.receipts) @@ -794,14 +794,14 @@ export class VMExecution extends Execution { } if (hasParentStateRoot === true && backStepToHash !== undefined) { - this.config.logger.warn( + this.config.logger?.warn( `${errorMsg}, backStepping vmHead to number=${backStepTo} hash=${short( backStepToHash ?? 'na', )} hasParentStateRoot=${short(backStepToRoot ?? 'na')}:\n${error}`, ) await this.chain.blockchain.setIteratorHead('vm', backStepToHash) } else { - this.config.logger.error( + this.config.logger?.error( `${errorMsg}, couldn't back step to vmHead number=${backStepTo} hash=${short( backStepToHash ?? 'na', )} hasParentStateRoot=${hasParentStateRoot} backStepToRoot=${short( @@ -832,9 +832,9 @@ export class VMExecution extends Execution { }, } writeFileSync(file, JSON.stringify(JSONDump, null, 2)) - this.config.logger.warn(`${errorMsg}:\n${error} payload saved to=${file}`) + this.config.logger?.warn(`${errorMsg}:\n${error} payload saved to=${file}`) } else { - this.config.logger.warn(`${errorMsg}:\n${error}`) + this.config.logger?.warn(`${errorMsg}:\n${error}`) } } @@ -847,7 +847,7 @@ export class VMExecution extends Execution { ) return actualExecuted } else { - this.config.logger.error(`VM execution failed with error`, error) + this.config.logger?.error(`VM execution failed with error`, error) return null } }) @@ -868,15 +868,16 @@ export class VMExecution extends Execution { const tdAdd = this.config.execCommon.gteHardfork(Hardfork.Paris) ? '' : `td=${this.chain.blocks.td} ` - ;(this.config.execCommon.gteHardfork(Hardfork.Paris) - ? 
this.config.logger.debug - : this.config.logger.info)( - `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}`, - ) + const msg = `Executed blocks count=${numExecuted} first=${firstNumber} hash=${firstHash} ${tdAdd}${baseFeeAdd}hardfork=${this.hardfork} last=${lastNumber} hash=${lastHash} txs=${txCounter}` + if (this.config.execCommon.gteHardfork(Hardfork.Paris) === true) { + this.config.logger?.debug(msg) + } else { + this.config.logger?.info(msg) + } await this.chain.update(false) } else { - this.config.logger.debug( + this.config.logger?.debug( `No blocks executed past chain head hash=${short(endHeadBlock.hash())} number=${ endHeadBlock.header.number }`, @@ -915,10 +916,10 @@ export class VMExecution extends Execution { this.config.execution && vmHeadBlock.header.number < canonicalHead.header.number ) { - this.config.logger.info(`Starting execution run ${infoStr}`) + this.config.logger?.info(`Starting execution run ${infoStr}`) void this.run(true, true) } else { - this.config.logger.info(`Skipped execution run ${infoStr}`) + this.config.logger?.info(`Skipped execution run ${infoStr}`) } return true } @@ -958,7 +959,7 @@ export class VMExecution extends Execution { * - Range of blocks, '5-10' */ async executeBlocks(first: number, last: number, txHashes: string[]) { - this.config.logger.info('Preparing for block execution (debug mode, no services started)...') + this.config.logger?.info('Preparing for block execution (debug mode, no services started)...') const block = await this.chain.blockchain.getBlock(first) const startExecutionHardfork = this.config.execCommon.getHardforkBy({ @@ -1003,9 +1004,9 @@ export class VMExecution extends Execution { block.transactions.length } gasUsed=${res.gasUsed} time=${diffSec}secs` if (diffSec <= this.MAX_TOLERATED_BLOCK_TIME) { - this.config.logger.info(msg) + this.config.logger?.info(msg) } else { - this.config.logger.warn(msg) + this.config.logger?.warn(msg) } } else { let count = 0 @@ -1016,7 +1017,7 @@ export class VMExecution extends Execution { const txHash = bytesToHex(tx.hash()) if (allTxs || txHashes.includes(txHash)) { const res = await runTx(vm, { block, tx }) - this.config.logger.info( + this.config.logger?.info( `Executed tx hash=${txHash} gasUsed=${res.totalGasSpent} from block num=${blockNumber}`, ) count += 1 @@ -1024,9 +1025,9 @@ export class VMExecution extends Execution { } if (count === 0) { if (!allTxs) { - this.config.logger.warn(`Block number ${first} contains no txs with provided hashes`) + this.config.logger?.warn(`Block number ${first} contains no txs with provided hashes`) } else { - this.config.logger.info(`Block has 0 transactions (no execution)`) + this.config.logger?.info(`Block has 0 transactions (no execution)`) } } } @@ -1039,19 +1040,19 @@ export class VMExecution extends Execution { const deactivatedStats = { size: 0, reads: 0, hits: 0, writes: 0 } let stats stats = sm['_caches']?.account?.stats() ?? deactivatedStats - this.config.logger.info( + this.config.logger?.info( `Account cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) stats = sm['_caches']?.storage?.stats() ?? deactivatedStats - this.config.logger.info( + this.config.logger?.info( `Storage cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) stats = sm['_caches']?.code?.stats() ?? 
deactivatedStats - this.config.logger.info( + this.config.logger?.info( `Code cache stats size=${stats.size} reads=${stats.reads} hits=${stats.hits} writes=${stats.writes}`, ) const tStats = sm['_trie'].database().stats() - this.config.logger.info( + this.config.logger?.info( `Trie cache stats size=${tStats.size} reads=${tStats.cache.reads} hits=${tStats.cache.hits} ` + `writes=${tStats.cache.writes} readsDB=${tStats.db.reads} hitsDB=${tStats.db.hits} writesDB=${tStats.db.writes}`, ) diff --git a/packages/client/src/logging.ts b/packages/client/src/logging.ts deleted file mode 100644 index 8b61b87a5e7..00000000000 --- a/packages/client/src/logging.ts +++ /dev/null @@ -1,149 +0,0 @@ -import chalk from 'chalk' -import * as winston from 'winston' -import DailyRotateFile from 'winston-daily-rotate-file' - -import type { Logger as WinstonLogger } from 'winston' -const { createLogger, format, transports: wTransports } = winston - -export type Logger = WinstonLogger -type LoggerArgs = { logFile: string; logLevelFile: 'error' | 'warn' | 'info' | 'debug' } & { - logRotate?: boolean - logMaxFiles?: number -} - -const { combine, timestamp, label, printf } = format - -/** - * Attention API - * - * If set string will be displayed on all log messages - */ -let attentionHF: string | null = null -let attentionCL: string | null = null - -const LevelColors = { - error: 'red', - warn: 'yellow', - info: 'green', - debug: 'white', -} as const - -/** - * Adds stack trace to error message if included - */ -const errorFormat = format((info: any) => { - if (info.message instanceof Error && info.message.stack !== undefined) { - return { ...info, message: info.message.stack } - } - if (info instanceof Error && info.stack !== undefined) { - return { ...info, message: info.stack } - } - return info -}) - -/** - * Returns the formatted log output optionally with colors enabled - * - * Optional info parameters: - * `attentionCL`: pass in string to `info.attentionCL` to set and permanently - * display and `null` to deactivate - * `attentionHF`: pass in string to `info.attentionHF` to set and permanently - * display and `null` to deactivate - * - */ -function logFormat(colors = false) { - return printf( - (info: { - level: string - message: unknown - [key: string]: unknown - }) => { - let level = info.level.toUpperCase() - - if (info.message === undefined) info.message = '(empty message)' - - if (colors) { - const color = chalk[LevelColors[info.level as keyof typeof LevelColors]] - level = color(level) - - const regex = /(\w+)=(.+?)(?:\s|$)/g - const replaceFn = (_: any, tag: string, char: string) => `${color(tag)}=${char} ` - info.message = (info.message as string).replace(regex, replaceFn) - if (typeof info.attentionCL === 'string') - info.attentionCL = info.attentionCL.replace(regex, replaceFn) - if (typeof info.attentionHF === 'string') - info.attentionHF = info.attentionHF.replace(regex, replaceFn) - } - - if (info.attentionCL !== undefined) attentionCL = info.attentionCL as string - if (info.attentionHF !== undefined) attentionHF = info.attentionHF as string - const CLLog = attentionCL !== null ? `[ ${attentionCL} ] ` : '' - const HFLog = attentionHF !== null ? 
`[ ${attentionHF} ] ` : '' - - const msg = `[${info.timestamp}] ${level} ${CLLog}${HFLog}${info.message}` - return msg - }, - ) -} - -/** - * Returns the complete logger format - */ -function formatConfig(colors = false) { - return combine( - errorFormat(), - format.splat(), - label({ label: 'ethereumjs' }), - timestamp({ format: 'MM-DD|HH:mm:ss' }), - logFormat(colors), - ) -} - -/** - * Returns a transport with log file saving (rotates if args.logRotate is true) - */ -function logFileTransport(args: LoggerArgs) { - let filename = args.logFile - const opts = { - level: args.logLevelFile, - format: formatConfig(), - } - if (args.logRotate !== true) { - return new wTransports.File({ - ...opts, - filename, - }) - } else { - // Insert %DATE% before the last period - const lastPeriod = filename.lastIndexOf('.') - filename = `${filename.substring(0, lastPeriod)}.%DATE%${filename.substring(lastPeriod)}` - const logger = new DailyRotateFile({ - ...opts, - filename, - maxFiles: args.logMaxFiles, - }) - return logger - } -} - -/** - * Returns a formatted {@link Logger} - */ -export function getLogger(args: { [key: string]: any } = { logLevel: 'info' }) { - const transports: any[] = [ - new wTransports.Console({ - level: args.logLevel, - silent: args.logLevel === 'off', - format: formatConfig(true), - }), - ] - if (typeof args.logFile === 'string') { - transports.push(logFileTransport(args as LoggerArgs)) - } - const logger = createLogger({ - transports, - format: formatConfig(), - level: args.logLevel, - }) - return logger -} diff --git a/packages/client/src/miner/miner.ts b/packages/client/src/miner/miner.ts index c55d9066e9d..1ab2bc15acf 100644 --- a/packages/client/src/miner/miner.ts +++ b/packages/client/src/miner/miner.ts @@ -92,7 +92,7 @@ export class Miner { blockNumber: this.service.chain.headers.height + BIGINT_1, }) if (this.config.chainCommon.hardforkGteHardfork(nextBlockHf, Hardfork.Paris)) { - this.config.logger.info('Miner: reached merge hardfork - stopping') + this.config.logger?.info('Miner: reached merge hardfork - stopping') this.stop() return } @@ -134,7 +134,7 @@ export class Miner { if (typeof this.ethash === 'undefined') { return } - this.config.logger.info('Miner: Finding next PoW solution 🔨') + this.config.logger?.info('Miner: Finding next PoW solution 🔨') const header = this.latestBlockHeader() this.ethashMiner = this.ethash.getMiner(header) const solution = await this.ethashMiner.iterate(-1) @@ -143,7 +143,7 @@ export class Miner { return } this.nextSolution = solution - this.config.logger.info('Miner: Found PoW solution 🔨') + this.config.logger?.info('Miner: Found PoW solution 🔨') return solution } @@ -155,7 +155,7 @@ export class Miner { const latestBlockHeader = this.latestBlockHeader() const target = Number(latestBlockHeader.timestamp) * 1000 + this.period - Date.now() const timeout = BIGINT_0 > target ? 0 : target - this.config.logger.debug( + this.config.logger?.debug( `Miner: Chain updated with block ${ latestBlockHeader.number }. Queuing next block assembly in ${Math.round(timeout / 1000)}s`, @@ -173,7 +173,7 @@ export class Miner { this.running = true this._boundChainUpdatedHandler = this.chainUpdated.bind(this) this.config.events.on(Event.CHAIN_UPDATED, this._boundChainUpdatedHandler) - this.config.logger.info(`Miner started. Assembling next block in ${this.period / 1000}s`) + this.config.logger?.info(`Miner started. 
Assembling next block in ${this.period / 1000}s`) void this.queueNextAssembly() return true } @@ -215,7 +215,7 @@ export class Miner { if ( (this.service.chain.blockchain as any).consensus.cliqueCheckRecentlySigned(header) === true ) { - this.config.logger.info(`Miner: We have too recently signed, waiting for next block`) + this.config.logger?.info(`Miner: We have too recently signed, waiting for next block`) this.assembling = false return } @@ -223,7 +223,7 @@ export class Miner { if (this.config.chainCommon.consensusType() === ConsensusType.ProofOfWork) { while (this.nextSolution === undefined) { - this.config.logger.info(`Miner: Waiting to find next PoW solution 🔨`) + this.config.logger?.info(`Miner: Waiting to find next PoW solution 🔨`) await new Promise((r) => setTimeout(r, 1000)) } } @@ -290,7 +290,7 @@ export class Miner { }) const txs = await this.service.txPool.txsByPriceAndNonce(vmCopy, { baseFee: baseFeePerGas }) - this.config.logger.info( + this.config.logger?.info( `Miner: Assembling block from ${txs.length} eligible txs ${ typeof baseFeePerGas === 'bigint' && baseFeePerGas !== BIGINT_0 ? `(baseFee: ${baseFeePerGas})` @@ -316,14 +316,14 @@ export class Miner { if (blockBuilder.gasUsed > gasLimit - BigInt(21000)) { // If block has less than 21000 gas remaining, consider it full blockFull = true - this.config.logger.info( + this.config.logger?.info( `Miner: Assembled block full (gasLeft: ${gasLimit - blockBuilder.gasUsed})`, ) } } else { // If there is an error adding a tx, it will be skipped const hash = bytesToHex(txs[index].hash()) - this.config.logger.debug( + this.config.logger?.debug( `Skipping tx ${hash}, error encountered when trying to add tx:\n${error}`, ) } @@ -336,7 +336,7 @@ export class Miner { if (this.config.saveReceipts) { await this.execution.receiptsManager?.saveReceipts(block, receipts) } - this.config.logger.info( + this.config.logger?.info( `Miner: Sealed block with ${block.transactions.length} txs ${ this.config.chainCommon.consensusType() === ConsensusType.ProofOfWork ? `(difficulty: ${block.header.difficulty})` @@ -364,7 +364,7 @@ export class Miner { clearTimeout(this._nextAssemblyTimeoutId) } this.running = false - this.config.logger.info('Miner stopped.') + this.config.logger?.info('Miner stopped.') return true } } diff --git a/packages/client/src/miner/pendingBlock.ts b/packages/client/src/miner/pendingBlock.ts index e40996251db..47e606e0a34 100644 --- a/packages/client/src/miner/pendingBlock.ts +++ b/packages/client/src/miner/pendingBlock.ts @@ -205,12 +205,12 @@ export class PendingBlock { baseFee: baseFeePerGas, allowedBlobs, }) - this.config.logger.info( + this.config.logger?.info( `Pending: Assembling block from ${txs.length} eligible txs (baseFee: ${baseFeePerGas})`, ) const { addedTxs, skippedByAddErrors, blobTxs } = await this.addTransactions(builder, txs) - this.config.logger.info( + this.config.logger?.info( `Pending: Added txs=${addedTxs} skippedByAddErrors=${skippedByAddErrors} from total=${txs.length} tx candidates`, ) @@ -304,7 +304,7 @@ export class PendingBlock { const withdrawalsStr = block.withdrawals !== undefined ? ` withdrawals=${block.withdrawals.length}` : '' const blobsStr = blobs ? 
` blobs=${blobs.blobs.length}` : '' - this.config.logger.info( + this.config.logger?.info( `Pending: Built block number=${block.header.number} txs=${ block.transactions.length }${withdrawalsStr}${blobsStr} skippedByAddErrors=${skippedByAddErrors} hash=${bytesToHex( @@ -316,7 +316,7 @@ export class PendingBlock { } private async addTransactions(builder: BlockBuilder, txs: TypedTransaction[]) { - this.config.logger.info(`Pending: Adding ${txs.length} additional eligible txs`) + this.config.logger?.info(`Pending: Adding ${txs.length} additional eligible txs`) let index = 0 let blockFull = false let skippedByAddErrors = 0 @@ -361,7 +361,7 @@ export class PendingBlock { if (error.message === 'tx has a higher gas limit than the remaining gas in the block') { if (builder.gasUsed > (builder as any).headerData.gasLimit - BigInt(21000)) { // If block has less than 21000 gas remaining, consider it full - this.config.logger.info(`Pending: Assembled block full`) + this.config.logger?.info(`Pending: Assembled block full`) addTxResult = AddTxResult.BlockFull } else { addTxResult = AddTxResult.SkippedByGasLimit @@ -369,13 +369,13 @@ export class PendingBlock { } else if ((error as Error).message.includes('blobs missing')) { // Remove the blob tx which doesn't has blobs bundled this.txPool.removeByHash(bytesToHex(tx.hash()), tx) - this.config.logger.error( + this.config.logger?.error( `Pending: Removed from txPool a blob tx ${bytesToHex(tx.hash())} with missing blobs`, ) addTxResult = AddTxResult.RemovedByErrors } else { // If there is an error adding a tx, it will be skipped - this.config.logger.debug( + this.config.logger?.debug( `Pending: Skipping tx ${bytesToHex( tx.hash(), )}, error encountered when trying to add tx:\n${error}`, diff --git a/packages/client/src/net/peer/peer.ts b/packages/client/src/net/peer/peer.ts index 2ed12f3424b..fcc4bf63bd7 100644 --- a/packages/client/src/net/peer/peer.ts +++ b/packages/client/src/net/peer/peer.ts @@ -121,7 +121,7 @@ export abstract class Peer extends EventEmitter { this.config.syncTargetHeight < latest.number) ) { this.config.syncTargetHeight = height - this.config.logger.info(`New sync target height=${height} hash=${short(latest.hash())}`) + this.config.logger?.info(`New sync target height=${height} hash=${short(latest.hash())}`) } } } diff --git a/packages/client/src/net/peerpool.ts b/packages/client/src/net/peerpool.ts index bb4674c5df3..8921a96ccf7 100644 --- a/packages/client/src/net/peerpool.ts +++ b/packages/client/src/net/peerpool.ts @@ -74,7 +74,7 @@ export class PeerPool { }) this.config.events.on(Event.PEER_ERROR, (error, peer) => { if (this.pool.get(peer.id)) { - this.config.logger.warn(`Peer error: ${error} ${peer}`) + this.config.logger?.warn(`Peer error: ${error} ${peer}`) this.ban(peer) } }) @@ -247,17 +247,17 @@ export class PeerPool { if (this.noPeerPeriods >= NO_PEER_PERIOD_COUNT) { this.noPeerPeriods = 0 if (this.config.server !== undefined) { - this.config.logger.info('Restarting RLPx server') + this.config.logger?.info('Restarting RLPx server') await this.config.server.stop() await this.config.server.start() - this.config.logger.info('Reinitiating server bootstrap') + this.config.logger?.info('Reinitiating server bootstrap') await this.config.server.bootstrap() } } else { let tablesize: number | undefined = 0 if (this.config.server !== undefined && this.config.server.discovery) { tablesize = this.config.server.dpt?.getPeers().length - this.config.logger.info(`Looking for suited peers: peertablesize=${tablesize}`) + 
this.config.logger?.info(`Looking for suited peers: peertablesize=${tablesize}`) } } } else { diff --git a/packages/client/src/net/server/rlpxserver.ts b/packages/client/src/net/server/rlpxserver.ts index e31ccf03460..4de4f988eaa 100644 --- a/packages/client/src/net/server/rlpxserver.ts +++ b/packages/client/src/net/server/rlpxserver.ts @@ -243,7 +243,7 @@ export class RlpxServer extends Server { if (typeof this.config.port === 'number') { this.dpt.bind(this.config.port, '0.0.0.0') } - this.config.logger.info( + this.config.logger?.info( `Started discovery service discV4=${this.config.discV4} dns=${this.config.discDns} refreshInterval=${this.refreshInterval}`, ) }) @@ -276,7 +276,7 @@ export class RlpxServer extends Server { try { await peer.accept(rlpxPeer, this) this.peers.set(peer.id, peer) - this.config.logger.debug(`Peer connected: ${peer}`) + this.config.logger?.debug(`Peer connected: ${peer}`) this.config.events.emit(Event.PEER_CONNECTED, peer) } catch (error: any) { // Fixes a memory leak where RlpxPeer objects could not be GCed, @@ -291,7 +291,7 @@ export class RlpxServer extends Server { const peer = this.peers.get(id) if (peer) { this.peers.delete(peer.id) - this.config.logger.debug( + this.config.logger?.debug( `Peer disconnected (${rlpxPeer.getDisconnectPrefix(reason)}): ${peer}`, ) this.config.events.emit(Event.PEER_DISCONNECTED, peer) diff --git a/packages/client/src/net/server/server.ts b/packages/client/src/net/server/server.ts index 4cd262ac859..8de2ba07ba0 100644 --- a/packages/client/src/net/server/server.ts +++ b/packages/client/src/net/server/server.ts @@ -74,7 +74,7 @@ export class Server { const protocols: Protocol[] = Array.from(this.protocols) await Promise.all(protocols.map((p) => p.open())) this.started = true - this.config.logger.info(`Started ${this.name} server maxPeers=${this.config.maxPeers}`) + this.config.logger?.info(`Started ${this.name} server maxPeers=${this.config.maxPeers}`) return true } @@ -89,7 +89,7 @@ export class Server { */ async stop(): Promise { this.started = false - this.config.logger.info(`Stopped ${this.name} server.`) + this.config.logger?.info(`Stopped ${this.name} server.`) return this.started } @@ -100,7 +100,7 @@ export class Server { */ addProtocols(protocols: Protocol[]) { if (this.started) { - this.config.logger.error('Cannot require protocols after server has been started') + this.config.logger?.error('Cannot require protocols after server has been started') return false } for (const p of protocols) { diff --git a/packages/client/src/rpc/index.ts b/packages/client/src/rpc/index.ts index 6d65e5d410f..0c30c406060 100644 --- a/packages/client/src/rpc/index.ts +++ b/packages/client/src/rpc/index.ts @@ -55,7 +55,7 @@ export class RPCManager { }) } } - this._config.logger.debug(`RPC Initialized ${Object.keys(methods).join(', ')}`) + this._config.logger?.debug(`RPC Initialized ${Object.keys(methods).join(', ')}`) return methods } diff --git a/packages/client/src/rpc/modules/debug.ts b/packages/client/src/rpc/modules/debug.ts index 90978be03f0..667ce9f6081 100644 --- a/packages/client/src/rpc/modules/debug.ts +++ b/packages/client/src/rpc/modules/debug.ts @@ -460,8 +460,14 @@ export class Debug { */ async verbosity(params: [number]) { const [level] = params + if (this.client.config.logger === undefined) { + throw { + code: INTERNAL_ERROR, + message: `no logger available`, + } + } this.client.config.logger.configure({ level: logLevels[level] }) - return `level: ${this.client.config.logger.level}` + return `level: 
${this.client.config.logger?.getLevel()}` } /** diff --git a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts index cf16768d8a0..7ecfd9983e2 100644 --- a/packages/client/src/rpc/modules/engine/CLConnectionManager.ts +++ b/packages/client/src/rpc/modules/engine/CLConnectionManager.ts @@ -1,10 +1,9 @@ import { Hardfork } from '@ethereumjs/common' -import { Event } from '../../../types.ts' +import { Event, type Logger } from '../../../types.ts' import { short, timeDiff } from '../../../util/index.ts' import type { Block } from '@ethereumjs/block' -import type winston from 'winston' import type { Config } from '../../../config.ts' import type { ExecutionPayloadV1, @@ -58,7 +57,8 @@ type PayloadToPayloadStats = { txs: { [key: number]: number } } -const logCLStatus = (logger: winston.Logger, logMsg: string, logLevel: logLevel) => { +const logCLStatus = (logger: Logger | undefined, logMsg: string, logLevel: logLevel) => { + if (logger === undefined) return logger[logLevel](enginePrefix + logMsg) } export class CLConnectionManager { @@ -344,7 +344,7 @@ export class CLConnectionManager { return } if (!this.config.synchronized) { - this.config.logger.info('') + this.config.logger?.info('') if (!this._lastPayload) { logCLStatus(this.config.logger, 'No consensus payload received yet', logLevel.INFO) } else { @@ -381,7 +381,7 @@ export class CLConnectionManager { public newPayloadLog() { if (this._lastPayload) { const payloadMsg = this._getPayloadLogMsg(this._lastPayload) - this.config.logger.info('') + this.config.logger?.info('') logCLStatus( this.config.logger, `New consensus payload received ${payloadMsg}`, diff --git a/packages/client/src/rpc/modules/engine/engine.ts b/packages/client/src/rpc/modules/engine/engine.ts index 2ed2ed46363..9bcd21d67dc 100644 --- a/packages/client/src/rpc/modules/engine/engine.ts +++ b/packages/client/src/rpc/modules/engine/engine.ts @@ -384,7 +384,7 @@ export class Engine { let response = error if (!response) { const validationError = `Error assembling block from payload during initialization` - this.config.logger.debug(validationError) + this.config.logger?.debug(validationError) const latestValidHash = await validHash( hexToBytes(parentHash as PrefixedHexString), this.chain, @@ -402,7 +402,7 @@ export class Engine { this.connectionManager.updatePayloadStats(headBlock) const hardfork = headBlock.common.hardfork() if (hardfork !== this.lastNewPayloadHF && this.lastNewPayloadHF !== '') { - this.config.logger.info( + this.config.logger?.info( `Hardfork change along new payload block number=${headBlock.header.number} hash=${short( headBlock.hash(), )} old=${this.lastNewPayloadHF} new=${hardfork}`, @@ -651,7 +651,7 @@ export class Engine { // if can't be executed then return syncing/accepted if (!executed) { - this.config.logger.debug( + this.config.logger?.debug( `Skipping block(s) execution for headBlock=${headBlock.header.number} hash=${short( headBlock.hash(), )} : pendingBlocks=${blocks.length - i}(limit=${ @@ -697,7 +697,7 @@ export class Engine { } const validationError = `Error verifying block while running: ${errorMsg}` - this.config.logger.error(validationError) + this.config.logger?.error(validationError) const response = { status: Status.INVALID, latestValidHash, validationError } this.invalidBlocks.set(blockHash.slice(2), error as Error) @@ -898,7 +898,7 @@ export class Engine { const prevError = this.invalidBlocks.get(headBlockHash.slice(2)) if (prevError !== undefined) { const 
validationError = `Received block previously marked INVALID: ${prevError.message}` - this.config.logger.debug(validationError) + this.config.logger?.debug(validationError) const latestValidHash = null const payloadStatus = { status: Status.INVALID, latestValidHash, validationError } const response = { payloadStatus, payloadId: null } @@ -917,7 +917,7 @@ export class Engine { (await this.skeleton.getBlockByHash(head, true)) ?? (await this.chain.getBlock(head)) } catch { - this.config.logger.debug( + this.config.logger?.debug( `Forkchoice announced head block unknown to EL hash=${short(headBlockHash)}`, ) const payloadStatus = { @@ -934,7 +934,7 @@ export class Engine { */ const hardfork = headBlock.common.hardfork() if (hardfork !== this.lastForkchoiceUpdatedHF && this.lastForkchoiceUpdatedHF !== '') { - this.config.logger.info( + this.config.logger?.info( `Hardfork change along forkchoice head block update number=${ headBlock.header.number } hash=${short(headBlock.hash())} old=${this.lastForkchoiceUpdatedHF} new=${hardfork}`, @@ -944,7 +944,7 @@ export class Engine { // Always keep beaconSync skeleton updated so that it stays updated with any skeleton sync // requirements that might come later because of reorg or CL restarts - this.config.logger.debug( + this.config.logger?.debug( `Forkchoice requested update to new head number=${headBlock.header.number} hash=${short( headBlock.hash(), )}`, @@ -1016,7 +1016,7 @@ export class Engine { this.config.ignoreStatelessInvalidExecs === true ) { // jump the vm head to failing block so that next block can be executed - this.config.logger.debug( + this.config.logger?.debug( `Jumping the stalled vmHead forward to hash=${this.execution.chainStatus.hash} height=${this.execution.chainStatus.height} to continue the execution`, ) await this.execution.jumpVmHead( diff --git a/packages/client/src/rpc/modules/engine/util/newPayload.ts b/packages/client/src/rpc/modules/engine/util/newPayload.ts index be9388f6549..e99d66fb77a 100644 --- a/packages/client/src/rpc/modules/engine/util/newPayload.ts +++ b/packages/client/src/rpc/modules/engine/util/newPayload.ts @@ -157,7 +157,7 @@ export const assembleBlock = async ( return { block } } catch (error) { const validationError = `Error assembling block from payload: ${error}` - config.logger.error(validationError) + config.logger?.error(validationError) const latestValidHash = await validHash( hexToBytes(payload.parentHash as PrefixedHexString), chain, diff --git a/packages/client/src/service/fullethereumservice.ts b/packages/client/src/service/fullethereumservice.ts index 67c7b18f022..8b8b910dd93 100644 --- a/packages/client/src/service/fullethereumservice.ts +++ b/packages/client/src/service/fullethereumservice.ts @@ -45,7 +45,7 @@ export class FullEthereumService extends Service { constructor(options: ServiceOptions) { super(options) - this.config.logger.info('Full sync mode') + this.config.logger?.info('Full sync mode') const { metaDB } = options if (metaDB !== undefined) { @@ -87,7 +87,7 @@ export class FullEthereumService extends Service { // also with skipOpen this call is a sync call as no async operation is executed // as good as creating the synchronizer here void this.switchToBeaconSync(true) - this.config.logger.info(`Post-merge 🐼 client mode: run with CL client.`) + this.config.logger?.info(`Post-merge 🐼 client mode: run with CL client.`) } else { this.synchronizer = new FullSynchronizer({ config: this.config, @@ -146,7 +146,7 @@ export class FullEthereumService extends Service { async open() { if 
(this.synchronizer !== undefined) { - this.config.logger.info( + this.config.logger?.info( `Preparing for sync using FullEthereumService with ${ this.synchronizer instanceof BeaconSynchronizer ? 'BeaconSynchronizer' @@ -154,7 +154,7 @@ export class FullEthereumService extends Service { }.`, ) } else { - this.config.logger.info('Starting FullEthereumService with no syncing.') + this.config.logger?.info('Starting FullEthereumService with no syncing.') } // Broadcast pending txs to newly connected peer this.config.events.on(Event.POOL_PEER_ADDED, (peer) => { @@ -188,7 +188,7 @@ export class FullEthereumService extends Service { await this.execution.setupVerkleVM() this.execution.vm = this.execution.verkleVM! } else { - this.execution.config.logger.info( + this.execution.config.logger?.info( `Initializing VM merkle statemanager genesis hardfork=${this.execution.hardfork}`, ) await this.execution.setupMerkleVM() @@ -247,17 +247,17 @@ export class FullEthereumService extends Service { } } } else { - this.config.logger.debug( + this.config.logger?.debug( `skipping snapsync since cl (skeleton) synchronized=${this.skeleton?.synchronized}`, ) } } else { - this.config.logger.warn( + this.config.logger?.warn( 'skipping building head state as neither execution is started nor snapsync is available', ) } } catch (error) { - this.config.logger.error(`Error building headstate error=${error}`) + this.config.logger?.error(`Error building headstate error=${error}`) } finally { this.building = false } @@ -361,7 +361,7 @@ export class FullEthereumService extends Service { } case 'NewBlockHashes': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { - this.config.logger.debug( + this.config.logger?.debug( `Dropping peer ${peer.id} for sending NewBlockHashes after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) @@ -376,7 +376,7 @@ export class FullEthereumService extends Service { } case 'NewBlock': { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { - this.config.logger.debug( + this.config.logger?.debug( `Dropping peer ${peer.id} for sending NewBlock after merge (EIP-3675)`, ) this.pool.ban(peer, 9000000) diff --git a/packages/client/src/service/service.ts b/packages/client/src/service/service.ts index c42007c00f4..8217ef0b47e 100644 --- a/packages/client/src/service/service.ts +++ b/packages/client/src/service/service.ts @@ -83,7 +83,7 @@ export class Service { try { await this.handle(message, protocol, peer) } catch (error: any) { - this.config.logger.debug( + this.config.logger?.debug( `Error handling message (${protocol}:${message.name}): ${error.message}`, ) } @@ -123,13 +123,13 @@ export class Service { this.config.server && this.config.server.addProtocols(protocols) this.config.events.on(Event.POOL_PEER_BANNED, (peer) => - this.config.logger.debug(`Peer banned: ${peer}`), + this.config.logger?.debug(`Peer banned: ${peer}`), ) this.config.events.on(Event.POOL_PEER_ADDED, (peer) => - this.config.logger.debug(`Peer added: ${peer}`), + this.config.logger?.debug(`Peer added: ${peer}`), ) this.config.events.on(Event.POOL_PEER_REMOVED, (peer) => - this.config.logger.debug(`Peer removed: ${peer}`), + this.config.logger?.debug(`Peer removed: ${peer}`), ) await this.pool.open() @@ -164,7 +164,7 @@ export class Service { this._statsInterval = setInterval(await this.stats.bind(this), this.STATS_INTERVAL) this.running = true - this.config.logger.info(`Started ${this.name} service.`) + this.config.logger?.info(`Started ${this.name} service.`) return true } @@ -180,7 +180,7 @@ export class Service { 
clearInterval(this._statsInterval) await this.synchronizer?.stop() this.running = false - this.config.logger.info(`Stopped ${this.name} service.`) + this.config.logger?.info(`Stopped ${this.name} service.`) return true } @@ -193,14 +193,14 @@ export class Service { const msg = `Memory stats usage=${heapUsed} MB percentage=${percentage}%` if (this._statsCounter % 4 === 0) { - this.config.logger.info(msg) + this.config.logger?.info(msg) this._statsCounter = 0 } else { - this.config.logger.debug(msg) + this.config.logger?.debug(msg) } if (percentage >= this.MEMORY_SHUTDOWN_THRESHOLD && !this.config.shutdown) { - this.config.logger.error('EMERGENCY SHUTDOWN DUE TO HIGH MEMORY LOAD...') + this.config.logger?.error('EMERGENCY SHUTDOWN DUE TO HIGH MEMORY LOAD...') process.kill(process.pid, 'SIGINT') } this._statsCounter += 1 diff --git a/packages/client/src/service/skeleton.ts b/packages/client/src/service/skeleton.ts index e6aecc1238c..886843367e7 100644 --- a/packages/client/src/service/skeleton.ts +++ b/packages/client/src/service/skeleton.ts @@ -216,7 +216,7 @@ export class Skeleton extends MetaDBManager { if (linked && this.status.progress.subchains.length > 1) { // Remove all other subchains as no more relevant const junkedSubChains = this.status.progress.subchains.splice(1) - this.config.logger.debug( + this.config.logger?.debug( `Canonical subchain linked with main, removing junked chains ${junkedSubChains .map((s) => `[tail=${s.tail} head=${s.head} next=${short(s.next)}]`) .join(',')}`, @@ -264,7 +264,7 @@ export class Skeleton extends MetaDBManager { headBlock = newBlock } lastchain.head = headBlock.header.number - this.config.logger.debug( + this.config.logger?.debug( `lastchain head fast forwarded from=${head} to=${lastchain.head} tail=${lastchain.tail}`, ) } @@ -304,14 +304,14 @@ export class Skeleton extends MetaDBManager { if (lastchain.tail > number) { // Not a noop / double head announce, abort with a reorg if (force) { - this.config.logger.warn( + this.config.logger?.warn( `Skeleton setHead before tail, resetting skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) lastchain.head = number lastchain.tail = number lastchain.next = head.header.parentHash } else { - this.config.logger.debug( + this.config.logger?.debug( `Skeleton announcement before tail, will reset skeleton tail=${lastchain.tail} head=${lastchain.head} newHead=${number}`, ) } @@ -321,7 +321,7 @@ export class Skeleton extends MetaDBManager { // post this if block const mayBeDupBlock = await this.getBlock(number) if (mayBeDupBlock !== undefined && equalsBytes(mayBeDupBlock.header.hash(), head.hash())) { - this.config.logger.debug( + this.config.logger?.debug( `Skeleton duplicate ${force ? 
'setHead' : 'announcement'} tail=${lastchain.tail} head=${ lastchain.head } number=${number} hash=${short(head.hash())}`, @@ -331,7 +331,7 @@ export class Skeleton extends MetaDBManager { // Since its not a dup block, so there is reorg in the chain or at least in the head // which we will let it get addressed post this if else block if (force) { - this.config.logger.debug( + this.config.logger?.debug( `Skeleton head reorg tail=${lastchain.tail} head=${ lastchain.head } number=${number} expected=${short( @@ -339,7 +339,7 @@ export class Skeleton extends MetaDBManager { )} actual=${short(head.hash())}`, ) } else { - this.config.logger.debug( + this.config.logger?.debug( `Skeleton differing announcement tail=${lastchain.tail} head=${lastchain.head} number=${number}`, ) } @@ -350,13 +350,13 @@ export class Skeleton extends MetaDBManager { await this.fastForwardHead(lastchain, number - BIGINT_1) // If its still less than number then its gapped head if (lastchain.head + BIGINT_1 < number) { - this.config.logger.debug( + this.config.logger?.debug( `Beacon chain gapped setHead head=${lastchain.head} newHead=${number}`, ) return true } } else { - this.config.logger.debug( + this.config.logger?.debug( `Beacon chain gapped announcement head=${lastchain.head} newHead=${number}`, ) return true @@ -365,7 +365,7 @@ export class Skeleton extends MetaDBManager { const parent = await this.getBlock(number - BIGINT_1) if (parent === undefined || !equalsBytes(parent.hash(), head.header.parentHash)) { if (force) { - this.config.logger.warn( + this.config.logger?.warn( `Beacon chain forked ancestor=${parent?.header.number} hash=${short( parent?.hash() ?? 'NA', )} want=${short(head.header.parentHash)}`, @@ -381,7 +381,7 @@ export class Skeleton extends MetaDBManager { this.status.progress.subchains.push(lastchain) this.status.linked = await this.checkLinked() } - this.config.logger.debug( + this.config.logger?.debug( `Beacon chain extended new head=${lastchain.head} tail=${lastchain.tail} next=${short( lastchain.next, )}`, @@ -416,7 +416,7 @@ export class Skeleton extends MetaDBManager { this.lastFcuTime = Date.now() } - this.config.logger.debug( + this.config.logger?.debug( `New skeleton head announced number=${head.header.number} hash=${short( head.hash(), )} force=${force}`, @@ -432,7 +432,7 @@ export class Skeleton extends MetaDBManager { } this.status.linked = true this.status.canonicalHeadReset = false - this.config.logger.debug( + this.config.logger?.debug( `Initing empty skeleton with current chain head tail=${lastchain.tail} head=${ lastchain.head } next=${short(lastchain.next)}`, @@ -473,14 +473,14 @@ export class Skeleton extends MetaDBManager { if (truncateTailTo !== undefined) { subchain.tail = truncateTailTo.header.number subchain.next = truncateTailTo.header.parentHash - this.config.logger.info( + this.config.logger?.info( `Truncated subchain0 with head=${subchain.head} to a new tail=${ subchain.tail } next=${short(subchain.next)} before overlaying a new subchain`, ) } else { // clear out this subchain - this.config.logger.info( + this.config.logger?.info( `Dropping subchain0 with head=${subchain.head} before overlaying a new subchain as truncateTailToNumber=${truncateTailToNumber} block not available `, ) this.status.progress.subchains.splice(0, 1) @@ -529,7 +529,7 @@ export class Skeleton extends MetaDBManager { // reset canonical head, don't change linked status because parent was // found in canonical chain this.status.canonicalHeadReset = true - this.config.logger.info( + 
this.config.logger?.info( `Truncated subchain tail for chain reorg to the subchain head=${ subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ @@ -540,7 +540,7 @@ export class Skeleton extends MetaDBManager { subchain.tail = truncateTailTo.header.number subchain.next = truncateTailTo.header.parentHash // just reset tail and no need to modify linked status - this.config.logger.info( + this.config.logger?.info( `Truncated subchain with head=${subchain.head} to a new tail=${ subchain.tail } next=${short(subchain.next)} linked=${this.status.linked} canonicalHeadReset=${ @@ -630,7 +630,7 @@ export class Skeleton extends MetaDBManager { const diff = Date.now() - this.lastSyncDate if (diff >= this.config.syncedStateRemovalPeriod) { this.synchronized = false - this.config.logger.info( + this.config.logger?.info( `Cl (skeleton) sync status reset (no chain updates for ${Math.round( diff / 1000, )} seconds).`, @@ -640,7 +640,7 @@ export class Skeleton extends MetaDBManager { } if (this.synchronized !== this.lastSynchronized) { - this.config.logger.debug( + this.config.logger?.debug( `Cl (skeleton) synchronized=${this.synchronized}${ latest !== null && latest !== undefined ? ' height=' + latest.number : '' } syncTargetHeight=${this.config.syncTargetHeight} lastSyncDate=${ @@ -667,7 +667,7 @@ export class Skeleton extends MetaDBManager { // blocking fill with engineParentLookupMaxDepth as fcU tries to put max engineParentLookupMaxDepth await this.blockingTailBackfillWithCutoff(this.chain.config.engineParentLookupMaxDepth).catch( (e) => { - this.config.logger.debug(`blockingTailBackfillWithCutoff exited with error=${e}`) + this.config.logger?.debug(`blockingTailBackfillWithCutoff exited with error=${e}`) }, ) } @@ -885,7 +885,7 @@ export class Skeleton extends MetaDBManager { // its head independent of matching or mismatching content if (tail >= this.status.progress.subchains[0].tail) { // Fully overwritten, get rid of the subchain as a whole - this.config.logger.debug( + this.config.logger?.debug( `Previous subchain fully overwritten tail=${tail} head=${head} next=${short(next)}`, ) this.status.progress.subchains.splice(1, 1) @@ -894,7 +894,7 @@ export class Skeleton extends MetaDBManager { } else { // Partially overwritten, trim the head to the overwritten size this.status.progress.subchains[1].head = this.status.progress.subchains[0].tail - BIGINT_1 - this.config.logger.debug( + this.config.logger?.debug( `Previous subchain partially overwritten tail=${tail} head=${head} next=${short( next, )} with newHead=${this.status.progress.subchains[1].head}`, @@ -915,7 +915,7 @@ export class Skeleton extends MetaDBManager { ) { // if subChain1Head is not in the skeleton then all previous subchains are not useful // and better to junk - this.config.logger.debug( + this.config.logger?.debug( `Removing all previous subchains as skeleton missing block at previous subchain head=${this.status.progress.subchains[1].head} or its tail=${this.status.progress.subchains[1].tail}`, ) this.status.progress.subchains.splice(1, this.status.progress.subchains.length - 1) @@ -925,7 +925,7 @@ export class Skeleton extends MetaDBManager { // only merge is we can integrate a big progress, as each merge leads // to disruption of the block fetcher to start a fresh if (head - tail > this.config.skeletonSubchainMergeMinimum) { - this.config.logger.debug( + this.config.logger?.debug( `Previous subchain merged tail=${tail} head=${head} next=${short(next)}`, ) 
this.status.progress.subchains[0].tail = tail @@ -935,7 +935,7 @@ export class Skeleton extends MetaDBManager { // are invalid since we skipped ahead. merged = true } else { - this.config.logger.debug( + this.config.logger?.debug( `Subchain ignored for merge tail=${tail} head=${head} count=${head - tail}`, ) this.status.progress.subchains.splice(1, 1) @@ -963,7 +963,7 @@ export class Skeleton extends MetaDBManager { let merged = false let tailUpdated = false - this.config.logger.debug( + this.config.logger?.debug( `Skeleton putBlocks start=${blocks[0]?.header.number} hash=${short( blocks[0]?.hash(), )} fork=${blocks[0].common.hardfork()} end=${ @@ -1002,7 +1002,7 @@ export class Skeleton extends MetaDBManager { // Critical error, we expect new incoming blocks to extend the canonical // subchain which is the [0]'th const tailBlock = await this.getBlock(this.status.progress.subchains[0].tail) - this.config.logger.warn( + this.config.logger?.warn( `Blocks don't extend canonical subchain tail=${ this.status.progress.subchains[0].tail } head=${this.status.progress.subchains[0].head} next=${short( @@ -1068,7 +1068,7 @@ export class Skeleton extends MetaDBManager { } if (tailBlock !== undefined && newTail) { - this.config.logger.info(`Backstepped skeleton tail=${newTail} head=${head}`) + this.config.logger?.info(`Backstepped skeleton tail=${newTail} head=${head}`) this.status.progress.subchains[0].tail = tailBlock.header.number this.status.progress.subchains[0].next = tailBlock.header.parentHash await this.writeSyncStatus() @@ -1077,7 +1077,7 @@ export class Skeleton extends MetaDBManager { // we need a new head, emptying the subchains this.status.progress.subchains = [] await this.writeSyncStatus() - this.config.logger.warn( + this.config.logger?.warn( `Couldn't backStep subchain 0, dropping subchains for new head signal`, ) return null @@ -1103,7 +1103,7 @@ export class Skeleton extends MetaDBManager { // if subchain0Head is not too ahead, then fill blocking as it gives better sync // log experience else just trigger if (isChainHeadNearEnough) { - this.config.logger.debug('Attempting blocking fill') + this.config.logger?.debug('Attempting blocking fill') await fillPromise } } @@ -1155,7 +1155,7 @@ export class Skeleton extends MetaDBManager { // fill if (!this.status.linked && blocks.length === maxItems) { void this.tryTailBackfill().catch((e) => { - this.chain.config.logger.debug(`tryTailBackfill exited with error ${e}`) + this.chain.config.logger?.debug(`tryTailBackfill exited with error ${e}`) }) } } @@ -1182,7 +1182,7 @@ export class Skeleton extends MetaDBManager { } if (canonicalHead > BIGINT_0) { - this.config.logger.debug( + this.config.logger?.debug( `Resetting canonicalHead for fillCanonicalChain from=${canonicalHead} to=${newHead}`, ) canonicalHead = newHead @@ -1196,7 +1196,7 @@ export class Skeleton extends MetaDBManager { const start = canonicalHead // This subchain is a reference to update the tail for the very subchain we are filling the data for - this.config.logger.debug( + this.config.logger?.debug( `Starting canonical chain fill canonicalHead=${canonicalHead} subchainHead=${subchain.head} skipUpdateEmit=${skipUpdateEmit}`, ) @@ -1214,7 +1214,7 @@ export class Skeleton extends MetaDBManager { // i) Only if canonicalHeadReset was flagged on causing skeleton to change its tail canonicality // Else we should back step and fetch again as it indicates some concurrency/db errors if (!this.status.canonicalHeadReset) { - this.config.logger.debug( + this.config.logger?.debug( 
`fillCanonicalChain block number=${number} not found, backStepping...`, ) await this.runWithLock(async () => { @@ -1223,7 +1223,7 @@ export class Skeleton extends MetaDBManager { await this.backStep(number) }) } else { - this.config.logger.debug( + this.config.logger?.debug( `fillCanonicalChain block number=${number} not found canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } @@ -1249,18 +1249,18 @@ export class Skeleton extends MetaDBManager { } } catch (e) { const validationError = `${e}` - this.config.logger.error(`fillCanonicalChain putBlock error=${validationError}`) + this.config.logger?.error(`fillCanonicalChain putBlock error=${validationError}`) const errorMsg = `${validationError}`.toLowerCase() if (errorMsg.includes('block') && errorMsg.includes('not found')) { // see if backstepping is required ot this is just canonicalHeadReset await this.runWithLock(async () => { if (!this.status.canonicalHeadReset) { - this.config.logger.debug( + this.config.logger?.debug( `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, backStepping...`, ) await this.backStep(number) } else { - this.config.logger.debug( + this.config.logger?.debug( `fillCanonicalChain canonicalHeadReset=${this.status.canonicalHeadReset}, breaking out...`, ) } @@ -1277,7 +1277,7 @@ export class Skeleton extends MetaDBManager { // handle insertion failures if (numBlocksInserted !== 1) { - this.config.logger.error( + this.config.logger?.error( `Failed to put block number=${number} fork=${block.common.hardfork()} hash=${short( block.hash(), )} parentHash=${short(block.header.parentHash)}from skeleton chain to canonical`, @@ -1286,25 +1286,25 @@ export class Skeleton extends MetaDBManager { let parent = null try { parent = await this.chain.getBlock(number - BIGINT_1) - this.config.logger.info( + this.config.logger?.info( `ParentByNumber number=${parent?.header.number}, hash=${short( parent?.hash() ?? 'undefined', )} hf=${parent?.common.hardfork()}`, ) } catch { - this.config.logger.error(`Failed to fetch parent of number=${number}`) + this.config.logger?.error(`Failed to fetch parent of number=${number}`) } let parentWithHash = null try { parentWithHash = await this.chain.getBlock(block.header.parentHash) - this.config.logger.info( + this.config.logger?.info( `parentByHash number=${parentWithHash?.header.number}, hash=${short( parentWithHash?.hash() ?? 
'undefined', )} hf=${parentWithHash?.common.hardfork()} `, ) } catch { - this.config.logger.error( + this.config.logger?.error( `Failed to fetch parent with parentWithHash=${short(block.header.parentHash)}`, ) } @@ -1334,14 +1334,14 @@ export class Skeleton extends MetaDBManager { } }) if (fillLogIndex >= this.config.numBlocksPerIteration) { - this.config.logger.debug( + this.config.logger?.debug( `Skeleton canonical chain fill status: canonicalHead=${canonicalHead} chainHead=${this.chain.blocks.height} subchainHead=${subchain.head}`, ) fillLogIndex = 0 } } this.filling = false - this.config.logger.debug( + this.config.logger?.debug( `Successfully put=${fillLogIndex} skipped (because already inserted)=${skippedLogIndex} blocks start=${start} end=${canonicalHead} skeletonHead=${subchain.head} from skeleton chain to canonical syncTargetHeight=${this.config.syncTargetHeight}`, ) } @@ -1666,7 +1666,7 @@ export class Skeleton extends MetaDBManager { if (this.pulled !== BIGINT_0 && fetching === true) { const sinceStarted = (new Date().getTime() - this.started) / 1000 beaconSyncETA = `${timeDuration((sinceStarted / Number(this.pulled)) * Number(left))}` - this.config.logger.debug( + this.config.logger?.debug( `Syncing beacon headers downloaded=${this.pulled} left=${left} eta=${beaconSyncETA}`, ) } @@ -1759,26 +1759,26 @@ export class Skeleton extends MetaDBManager { peers = peers !== undefined ? `${peers}` : 'na' // if valid then the status info is short and sweet - this.config.logger.info('') + this.config.logger?.info('') if (isValid) { - this.config.logger.info(`${logPrefix} ${status}${extraStatus} ${vmlogInfo} peers=${peers}`) + this.config.logger?.info(`${logPrefix} ${status}${extraStatus} ${vmlogInfo} peers=${peers}`) } else { // else break into two - this.config.logger.info( + this.config.logger?.info( `${logPrefix} ${status}${extraStatus} synchronized=${this.config.synchronized} peers=${peers}`, ) if (snapLogInfo !== undefined && snapLogInfo !== '') { - this.config.logger.info(`${logPrefix} ${snapLogInfo}`) + this.config.logger?.info(`${logPrefix} ${snapLogInfo}`) } if (vmlogInfo !== undefined && vmlogInfo !== '') { - this.config.logger.info(`${logPrefix} ${vmlogInfo}`) + this.config.logger?.info(`${logPrefix} ${vmlogInfo}`) } if (!isSynced) { - this.config.logger.info(`${logPrefix} ${subchainLog}`) + this.config.logger?.info(`${logPrefix} ${subchainLog}`) } } } else { - this.config.logger.debug( + this.config.logger?.debug( `${logPrefix} ${status} linked=${ this.status.linked } subchains=${this.status.progress.subchains diff --git a/packages/client/src/service/txpool.ts b/packages/client/src/service/txpool.ts index 055f572c74c..58c86876ca9 100644 --- a/packages/client/src/service/txpool.ts +++ b/packages/client/src/service/txpool.ts @@ -209,7 +209,7 @@ export class TxPool { this.POOLED_STORAGE_TIME_LIMIT * 1000 * 60, ) - if (this.config.logger.isInfoEnabled()) { + if (this.config.logger?.isInfoEnabled() === true) { // Only turn on txPool stats calculator if log level is info or above // since all stats calculator does is print `info` logs this._logInterval = setInterval(this._logPoolStats.bind(this), this.LOG_STATISTICS_INTERVAL) @@ -606,7 +606,7 @@ export class TxPool { */ async handleAnnouncedTxs(txs: TypedTransaction[], peer: Peer, peerPool: PeerPool) { if (!this.running || txs.length === 0) return - this.config.logger.debug(`TxPool: received new transactions number=${txs.length}`) + this.config.logger?.debug(`TxPool: received new transactions number=${txs.length}`) 
this.addToKnownByPeer( txs.map((tx) => tx.hash()), peer, @@ -620,7 +620,7 @@ export class TxPool { newTxHashes[1].push(tx.serialize().byteLength) newTxHashes[2].push(tx.hash()) } catch (error: any) { - this.config.logger.debug( + this.config.logger?.debug( `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } @@ -654,11 +654,11 @@ export class TxPool { if (reqHashes.length === 0) return - this.config.logger.debug(`TxPool: received new tx hashes number=${reqHashes.length}`) + this.config.logger?.debug(`TxPool: received new tx hashes number=${reqHashes.length}`) const reqHashesStr: UnprefixedHash[] = reqHashes.map(bytesToUnprefixedHex) this.pending = this.pending.concat(reqHashesStr) - this.config.logger.debug( + this.config.logger?.debug( `TxPool: requesting txs number=${reqHashes.length} pending=${this.pending.length}`, ) const getPooledTxs = await peer.eth?.getPooledTransactions({ @@ -672,14 +672,14 @@ export class TxPool { return } const [_, txs] = getPooledTxs - this.config.logger.debug(`TxPool: received requested txs number=${txs.length}`) + this.config.logger?.debug(`TxPool: received requested txs number=${txs.length}`) const newTxHashes: [number[], number[], Uint8Array[]] = [[], [], []] as any for (const tx of txs) { try { await this.add(tx) } catch (error: any) { - this.config.logger.debug( + this.config.logger?.debug( `Error adding tx to TxPool: ${error.message} (tx hash: ${bytesToHex(tx.hash())})`, ) } @@ -883,7 +883,7 @@ export class TxPool { byNonce.set(address, []) } } - this.config.logger.info( + this.config.logger?.info( `txsByPriceAndNonce selected txs=${txs.length}, skipped byNonce=${skippedStats.byNonce} byPrice=${skippedStats.byPrice} byBlobsLimit=${skippedStats.byBlobsLimit}`, ) return txs @@ -897,7 +897,7 @@ export class TxPool { clearInterval(this._cleanupInterval as NodeJS.Timeout) clearInterval(this._logInterval as NodeJS.Timeout) this.running = false - this.config.logger.info('TxPool stopped.') + this.config.logger?.info('TxPool stopped.') return true } @@ -945,13 +945,13 @@ export class TxPool { handlederrors++ } } - this.config.logger.info( + this.config.logger?.info( `TxPool Statistics txs=${this.txsInPool} senders=${this.pool.size} peers=${this.service.pool.peers.length}`, ) - this.config.logger.info( + this.config.logger?.info( `TxPool Statistics broadcasts=${broadcasts}/tx/peer broadcasterrors=${broadcasterrors}/tx/peer knownpeers=${knownpeers} since minutes=${this.POOLED_STORAGE_TIME_LIMIT}`, ) - this.config.logger.info( + this.config.logger?.info( `TxPool Statistics successfuladds=${handledadds} failedadds=${handlederrors} since minutes=${this.HANDLED_CLEANUP_TIME_LIMIT}`, ) } diff --git a/packages/client/src/sync/beaconsync.ts b/packages/client/src/sync/beaconsync.ts index 5b72220d7ad..78ca599d4d3 100644 --- a/packages/client/src/sync/beaconsync.ts +++ b/packages/client/src/sync/beaconsync.ts @@ -79,7 +79,7 @@ export class BeaconSynchronizer extends Synchronizer { const timestamp = this.chain.blocks.latest?.header.timestamp this.config.chainCommon.setHardforkBy({ blockNumber: number, timestamp }) - this.config.logger.info( + this.config.logger?.info( `Latest local block number=${Number(number)} td=${td} hash=${bytesToHex( hash, )} hardfork=${this.config.chainCommon.hardfork()}`, @@ -142,7 +142,7 @@ export class BeaconSynchronizer extends Synchronizer { try { await this.sync() } catch (error: any) { - this.config.logger.error(`Beacon sync error: ${error.message}`) + this.config.logger?.error(`Beacon sync error: 
${error.message}`) this.config.events.emit(Event.SYNC_ERROR, error) } await new Promise((resolve) => setTimeout(resolve, this.interval)) @@ -163,7 +163,7 @@ export class BeaconSynchronizer extends Synchronizer { if (!this.opened) return // Clean the current fetcher, later this.start will start it again await this.stop() - this.config.logger.debug( + this.config.logger?.debug( `Beacon sync reorged, new head number=${block.header.number} hash=${short( block.header.hash(), )}`, @@ -220,7 +220,7 @@ export class BeaconSynchronizer extends Synchronizer { this.config.syncTargetHeight < latest.number ) { this.config.syncTargetHeight = height - this.config.logger.info(`New sync target height=${height} hash=${short(latest.hash())}`) + this.config.logger?.info(`New sync target height=${height} hash=${short(latest.hash())}`) } const { tail } = this.skeleton.bounds() @@ -242,7 +242,7 @@ export class BeaconSynchronizer extends Synchronizer { } if (count > BIGINT_0 && (this.fetcher === null || this.fetcher.syncErrored !== undefined)) { - this.config.logger.debug( + this.config.logger?.debug( `syncWithPeer - new ReverseBlockFetcher peer=${ peer?.id } subChainTail=${tail} first=${first} count=${count} chainHeight=${ @@ -270,7 +270,7 @@ export class BeaconSynchronizer extends Synchronizer { async processSkeletonBlocks(blocks: Block[]) { if (blocks.length === 0) { if (this.fetcher !== null) { - this.config.logger.warn('No blocks fetched are applicable for import') + this.config.logger?.warn('No blocks fetched are applicable for import') } return } @@ -280,7 +280,7 @@ export class BeaconSynchronizer extends Synchronizer { const last = blocks[blocks.length - 1].header.number const hash = short(blocks[0].hash()) - this.config.logger.debug( + this.config.logger?.debug( `Imported skeleton blocks count=${blocks.length} first=${first} last=${last} hash=${hash} peers=${this.pool.size}`, ) } diff --git a/packages/client/src/sync/fetcher/accountfetcher.ts b/packages/client/src/sync/fetcher/accountfetcher.ts index 4cc6ad7b5fc..2ae370c7e48 100644 --- a/packages/client/src/sync/fetcher/accountfetcher.ts +++ b/packages/client/src/sync/fetcher/accountfetcher.ts @@ -217,7 +217,7 @@ export class AccountFetcher extends Fetcher return true } catch (error) { - this.config.logger.error(`Error while fetching snapsync: ${error}`) + this.config.logger?.error(`Error while fetching snapsync: ${error}`) return false } finally { this.fetcherDoneFlags.syncing = false @@ -240,7 +240,7 @@ export class AccountFetcher extends Fetcher BIGINT_2EXP256, BIGINT_100, ) - this.config.logger.warn( + this.config.logger?.warn( `accountFetcher completed with pending range done=${fetcherProgress}%`, ) } @@ -254,7 +254,7 @@ export class AccountFetcher extends Fetcher fetcherDoneFlags.storageFetcher.count, BIGINT_100, ) - this.config.logger.warn( + this.config.logger?.warn( `storageFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.storageFetcher.count} queued=${this.storageFetcher.storageRequests.length}`, ) } @@ -269,7 +269,7 @@ export class AccountFetcher extends Fetcher fetcherDoneFlags.byteCodeFetcher.count, BIGINT_100, ) - this.config.logger.warn( + this.config.logger?.warn( `byteCodeFetcher completed with pending tasks done=${reqsDone}% of ${fetcherDoneFlags.byteCodeFetcher.count}`, ) } diff --git a/packages/client/src/sync/fetcher/fetcher.ts b/packages/client/src/sync/fetcher/fetcher.ts index 1983a23dde9..791c9dcd46b 100644 --- a/packages/client/src/sync/fetcher/fetcher.ts +++ 
b/packages/client/src/sync/fetcher/fetcher.ts @@ -451,7 +451,7 @@ export abstract class Fetcher extends Readable this.finished += jobItems.length cb() } catch (error: any) { - this.config.logger.warn(`Error storing received block or header result: ${error}`) + this.config.logger?.warn(`Error storing received block or header result: ${error}`) const { destroyFetcher, banPeer, stepBack } = this.processStoreError( error, jobItems[0].task, diff --git a/packages/client/src/sync/fullsync.ts b/packages/client/src/sync/fullsync.ts index daa942a9e56..579d020fac0 100644 --- a/packages/client/src/sync/fullsync.ts +++ b/packages/client/src/sync/fullsync.ts @@ -102,7 +102,7 @@ export class FullSynchronizer extends Synchronizer { const timestamp = this.chain.blocks.latest?.header.timestamp this.config.chainCommon.setHardforkBy({ blockNumber: number, timestamp }) - this.config.logger.info( + this.config.logger?.info( `Latest local block number=${Number(number)} td=${td} hash=${short( hash, )} hardfork=${this.config.chainCommon.hardfork()}`, @@ -188,7 +188,7 @@ export class FullSynchronizer extends Synchronizer { this.config.syncTargetHeight < latest.number ) { this.config.syncTargetHeight = height - this.config.logger.info(`New sync target height=${height} hash=${short(latest.hash())}`) + this.config.logger?.info(`New sync target height=${height} hash=${short(latest.hash())}`) } // Start fetcher from a safe distance behind because if the previous fetcher exited @@ -213,7 +213,7 @@ export class FullSynchronizer extends Synchronizer { const fetcherHeight = this.fetcher.first + this.fetcher.count - BIGINT_1 if (height > fetcherHeight) { this.fetcher.count += height - fetcherHeight - this.config.logger.info(`Updated fetcher target to height=${height} peer=${peer} `) + this.config.logger?.info(`Updated fetcher target to height=${height} peer=${peer} `) } } return true @@ -226,14 +226,14 @@ export class FullSynchronizer extends Synchronizer { if (this.config.chainCommon.gteHardfork(Hardfork.Paris)) { if (this.fetcher !== null) { // If we are beyond the merge block we should stop the fetcher - this.config.logger.info('Paris (Merge) hardfork reached, stopping block fetcher') + this.config.logger?.info('Paris (Merge) hardfork reached, stopping block fetcher') this.clearFetcher() } } if (blocks.length === 0) { if (this.fetcher !== null) { - this.config.logger.warn('No blocks fetched are applicable for import') + this.config.logger?.warn('No blocks fetched are applicable for import') } return } @@ -256,7 +256,7 @@ export class FullSynchronizer extends Synchronizer { } } - this.config.logger.info( + this.config.logger?.info( `Imported blocks count=${ blocks.length } first=${first} last=${last} hash=${hash} ${baseFeeAdd}hardfork=${this.config.chainCommon.hardfork()} peers=${ @@ -318,13 +318,13 @@ export class FullSynchronizer extends Synchronizer { } try { await this.chain.blockchain.validateHeader(block.header) - } catch (err) { - this.config.logger.debug( + } catch (err: any) { + this.config.logger?.debug( `Error processing new block from peer ${ peer ? `id=${peer.id.slice(0, 8)}` : '(no peer)' } hash=${short(block.hash())}`, ) - this.config.logger.debug(err) + this.config.logger?.debug(err.stack ?? 
err.message) return } // Send NEW_BLOCK to square root of total number of peers in pool @@ -389,7 +389,7 @@ export class FullSynchronizer extends Synchronizer { if (!newSyncHeight) return const [hash, height] = newSyncHeight this.config.syncTargetHeight = height - this.config.logger.info(`New sync target height=${height} hash=${short(hash)}`) + this.config.logger?.info(`New sync target height=${height} hash=${short(hash)}`) // Enqueue if we are close enough to chain head if (min < this.chain.headers.height + BigInt(3000)) { this.fetcher.enqueueByNumberList(blockNumberList, min, height) @@ -406,7 +406,7 @@ this.config.syncTargetHeight !== BIGINT_0 && this.chain.blocks.height <= this.config.syncTargetHeight - BigInt(50) this.execution.run(true, shouldRunOnlyBatched).catch((e) => { - this.config.logger.error(`Full sync execution trigger errored`, {}, e) + this.config.logger?.error(`Full sync execution trigger errored`, {}, e) }) } diff --git a/packages/client/src/sync/snapsync.ts b/packages/client/src/sync/snapsync.ts index 4799bbd62c8..7a4d1d8dcbf 100644 --- a/packages/client/src/sync/snapsync.ts +++ b/packages/client/src/sync/snapsync.ts @@ -61,7 +61,7 @@ export class SnapSynchronizer extends Synchronizer { await this.chain.open() await this.pool.open() - this.config.logger.info( + this.config.logger?.info( `Opened SnapSynchronizer syncTargetHeight=${this.config.syncTargetHeight ?? 'NA'}`, ) } @@ -112,7 +112,7 @@ export class SnapSynchronizer extends Synchronizer { try { await this.sync() } catch (error: any) { - this.config.logger.error(`Snap sync error: ${error.message}`) + this.config.logger?.error(`Snap sync error: ${error.message}`) this.config.events.emit(Event.SYNC_ERROR, error) } await new Promise((resolve) => setTimeout(resolve, this.interval)) @@ -168,7 +168,7 @@ export class SnapSynchronizer extends Synchronizer { snapTargetRoot, )} hash=${short(snapTargetHash)}` if (fetchingAlreadyDone) { - this.config.logger.debug(snapDoneMsg) + this.config.logger?.debug(snapDoneMsg) } else { this.config.superMsg(snapDoneMsg) } @@ -188,7 +188,7 @@ export class SnapSynchronizer extends Synchronizer { async syncWithPeer(peer?: Peer): Promise { // if skeleton is passed we have to wait for skeleton to be updated if (this.skeleton?.synchronized !== true || this.fetcherDoneFlags.done) { - this.config.logger.info(`SnapSynchronizer - early return ${peer?.id}`) + this.config.logger?.info(`SnapSynchronizer - early return ${peer?.id}`) return false } @@ -197,13 +197,13 @@ return false } - this.config.logger.info(`SnapSynchronizer - syncWithPeer ${peer?.id}`) + this.config.logger?.info(`SnapSynchronizer - syncWithPeer ${peer?.id}`) const stateRoot = latest.stateRoot const height = latest.number // eslint-disable-next-line eqeqeq if (this.config.syncTargetHeight == null || this.config.syncTargetHeight < latest.number) { this.config.syncTargetHeight = height - this.config.logger.info(`New sync target height=${height} hash=${bytesToHex(latest.hash())}`) + this.config.logger?.info(`New sync target height=${height} hash=${bytesToHex(latest.hash())}`) } if ( @@ -216,7 +216,7 @@ this.fetcherDoneFlags.snapTargetHash = latest.hash() } - this.config.logger.info( + this.config.logger?.info( `syncWithPeer new AccountFetcher peer=${peer?.id} snapTargetHeight=${ this.fetcherDoneFlags.snapTargetHeight } snapTargetRoot=${short(this.fetcherDoneFlags.snapTargetRoot!)} ${
diff --git a/packages/client/src/sync/sync.ts b/packages/client/src/sync/sync.ts index 5ffdb3c6925..5f6173a45e9 100644 --- a/packages/client/src/sync/sync.ts +++ b/packages/client/src/sync/sync.ts @@ -64,7 +64,7 @@ export abstract class Synchronizer { this.config.events.on(Event.POOL_PEER_ADDED, (peer) => { if (this.syncable(peer)) { - this.config.logger.debug(`Found ${this.type} peer: ${peer}`) + this.config.logger?.debug(`Found ${this.type} peer: ${peer}`) } }) @@ -139,7 +139,7 @@ export abstract class Synchronizer { resolveSync(height?: bigint) { this.clearFetcher() const heightStr = typeof height === 'bigint' && height !== BIGINT_0 ? ` height=${height}` : '' - this.config.logger.debug(`Finishing up sync with the current fetcher ${heightStr}`) + this.config.logger?.debug(`Finishing up sync with the current fetcher ${heightStr}`) return true } @@ -148,10 +148,10 @@ export abstract class Synchronizer { if (this._fetcher) { await this._fetcher.blockingFetch() } - this.config.logger.debug(`Fetcher finished fetching...`) + this.config.logger?.debug(`Fetcher finished fetching...`) return this.resolveSync() } catch (error: any) { - this.config.logger.error( + this.config.logger?.error( `Received sync error, stopping sync and clearing fetcher: ${error.message ?? error}`, ) this.clearFetcher() @@ -167,7 +167,7 @@ export abstract class Synchronizer { let peer = await this.best() let numAttempts = 1 while (!peer && this.opened) { - this.config.logger.debug(`Waiting for best peer (attempt #${numAttempts})`) + this.config.logger?.debug(`Waiting for best peer (attempt #${numAttempts})`) await wait(5000) peer = await this.best() numAttempts += 1 @@ -205,7 +205,7 @@ export abstract class Synchronizer { clearInterval(this._syncedStatusCheckInterval as NodeJS.Timeout) await new Promise((resolve) => setTimeout(resolve, this.interval)) this.running = false - this.config.logger.info('Stopped synchronization.') + this.config.logger?.info('Stopped synchronization.') return true } diff --git a/packages/client/src/types.ts b/packages/client/src/types.ts index fa13e3f595a..cb47618de94 100644 --- a/packages/client/src/types.ts +++ b/packages/client/src/types.ts @@ -156,3 +156,14 @@ export type PrometheusMetrics = { feeMarketEIP1559TxGauge: promClient.Gauge blobEIP4844TxGauge: promClient.Gauge } + +export interface Logger { + [logger: string]: any + info(message: string, ...meta: any[]): void + warn(message: string, ...meta: any[]): void + error(message: string, ...meta: any[]): void + debug(message: string, ...meta: any[]): void + isInfoEnabled(): boolean + configure(args: { [key: string]: any }): void + getLevel(): string +} diff --git a/packages/client/src/util/debug.ts b/packages/client/src/util/debug.ts index abdfc7f48bd..ce1a4f019db 100644 --- a/packages/client/src/util/debug.ts +++ b/packages/client/src/util/debug.ts @@ -62,5 +62,5 @@ const main = async () => { main() ` - execution.config.logger.info(code) + execution.config.logger?.info(code) } diff --git a/packages/client/src/util/rpc.ts b/packages/client/src/util/rpc.ts index 3ebf507622f..e25307399b7 100644 --- a/packages/client/src/util/rpc.ts +++ b/packages/client/src/util/rpc.ts @@ -9,8 +9,8 @@ import { jwt } from '../ext/jwt-simple.ts' import type { IncomingMessage } from 'connect' import type { TAlgorithm } from '../ext/jwt-simple.ts' -import type { Logger } from '../logging.ts' import type { RPCManager } from '../rpc/index.ts' +import type { Logger } from '../types.ts' const { json: JSONParser } = bodyParser const { decode } = jwt diff --git 
a/packages/client/test/cli/cli.spec.ts b/packages/client/test/cli/cli.spec.ts index f23b08614fd..2e256bae66d 100644 --- a/packages/client/test/cli/cli.spec.ts +++ b/packages/client/test/cli/cli.spec.ts @@ -418,7 +418,7 @@ describe('[CLI]', () => { child: ChildProcessWithoutNullStreams, resolve: Function, ) => { - if (message.includes('DEBUG')) { + if (message.includes('DBG')) { assert.isTrue(true, 'debug logging is enabled') child.kill() resolve(undefined) diff --git a/packages/client/test/cli/utils.spec.ts b/packages/client/test/cli/utils.spec.ts index ade1c103997..22258586881 100644 --- a/packages/client/test/cli/utils.spec.ts +++ b/packages/client/test/cli/utils.spec.ts @@ -129,6 +129,6 @@ describe('generateClientConfig', () => { logLevel: 'debug', } const { config } = await generateClientConfig(opts) - assert.equal(config.logger.level, 'debug', 'Log level should be set to debug') + assert.equal(config.logger?.level, 'debug', 'Log level should be set to debug') }) }) diff --git a/packages/client/test/integration/miner.spec.ts b/packages/client/test/integration/miner.spec.ts index e4a3a8e0489..981d62dc085 100644 --- a/packages/client/test/integration/miner.spec.ts +++ b/packages/client/test/integration/miner.spec.ts @@ -9,12 +9,12 @@ import { import { assert, describe, it } from 'vitest' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { Event } from '../../src/types.ts' import { createInlineClient } from '../../src/util/index.ts' import { parseMultiaddrs } from '../../src/util/parse.ts' import type { EthereumClient } from '../../src/index.ts' +import { getLogger } from '../logging.spec.ts' async function setupDevnet(prefundAddress: Address) { const addr = prefundAddress.toString().slice(2) diff --git a/packages/client/test/integration/pow.spec.ts b/packages/client/test/integration/pow.spec.ts index b9cc36482ad..122baf29bc7 100644 --- a/packages/client/test/integration/pow.spec.ts +++ b/packages/client/test/integration/pow.spec.ts @@ -82,7 +82,7 @@ describe('PoW client test', async () => { assert.isTrue(started, 'client started successfully') }, 60000) const message: string = await new Promise((resolve) => { - client.config.logger.on('data', (data: any) => { + client.config.logger?.logger.on('data', (data: any) => { if (data.message.includes('Miner: Found PoW solution') === true) { resolve(data.message) } diff --git a/packages/client/test/logging.spec.ts b/packages/client/test/logging.spec.ts index e946ba624a1..f8c9e5f7408 100644 --- a/packages/client/test/logging.spec.ts +++ b/packages/client/test/logging.spec.ts @@ -1,9 +1,36 @@ import { assert, describe, it } from 'vitest' -import { getLogger } from '../src/logging.ts' +import { createLogger, transports as wTransports } from 'winston' +import { type LoggerArgs, WinstonLogger, formatConfig, logFileTransport } from '../bin/repl.ts' + +/** + * Returns a formatted {@link Logger} + */ +export function getLogger(args: { [key: string]: any } = { logLevel: 'info' }) { + const transports: any[] = [ + new wTransports.Console({ + level: args.logLevel, + silent: args.logLevel === 'off', + format: formatConfig(true), + }), + ] + if (typeof args.logFile === 'string') { + transports.push(logFileTransport(args as LoggerArgs)) + } + const logger = createLogger({ + transports, + format: formatConfig(), + level: args.logLevel, + }) + return new WinstonLogger(logger) +} describe('[Logging]', () => { - const logger = getLogger({ logLevel: 'info', logFile: 'ethereumjs.log', logLevelFile: 'info' }) + 
const logger = getLogger({ + logLevel: 'info', + logFile: 'ethereumjs.log', + logLevelFile: 'info', + }).logger const format = logger.transports.find((t: any) => t.name === 'console')!.format! it('should have correct transports', () => { diff --git a/packages/client/test/miner/pendingBlock.spec.ts b/packages/client/test/miner/pendingBlock.spec.ts index 24b9d76ccd2..dcf7ed421b2 100644 --- a/packages/client/test/miner/pendingBlock.spec.ts +++ b/packages/client/test/miner/pendingBlock.spec.ts @@ -22,7 +22,6 @@ import { KZG as microEthKZG } from 'micro-eth-signer/kzg' import { assert, describe, it, vi } from 'vitest' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { PendingBlock } from '../../src/miner/index.ts' import { TxPool } from '../../src/service/txpool.ts' import { mockBlockchain } from '../rpc/mockBlockchain.ts' @@ -33,6 +32,7 @@ import type { Blockchain } from '@ethereumjs/blockchain' import type { TypedTransaction } from '@ethereumjs/tx' import type { PrefixedHexString } from '@ethereumjs/util' import type { VM } from '@ethereumjs/vm' +import { getLogger } from '../logging.spec.ts' const kzg = new microEthKZG(trustedSetup) diff --git a/packages/client/test/rpc/debug/verbosity.spec.ts b/packages/client/test/rpc/debug/verbosity.spec.ts index 0f17bd406f4..7930c6f60f3 100644 --- a/packages/client/test/rpc/debug/verbosity.spec.ts +++ b/packages/client/test/rpc/debug/verbosity.spec.ts @@ -23,12 +23,12 @@ describe(method, () => { const levelError = 0 res = await rpc.request(method, [levelError]) assert.equal(res.result, 'level: error', 'verbosity level successfully lowered') - assert.equal(client.config.logger.level, logLevels[levelError]) + assert.equal(client.config.logger?.getLevel(), logLevels[levelError]) // highest level; e.g. 
be very verbose and show even debug logs const levelDebug = 3 res = await rpc.request(method, [levelDebug]) assert.equal(res.result, 'level: debug', 'verbosity level successfully increased') - assert.equal(client.config.logger.level, logLevels[levelDebug]) + assert.equal(client.config.logger?.getLevel(), logLevels[levelDebug]) }) }) diff --git a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts index df554e1488f..1b62daeb0ba 100644 --- a/packages/client/test/rpc/engine/CLConnectionManager.spec.ts +++ b/packages/client/test/rpc/engine/CLConnectionManager.spec.ts @@ -8,6 +8,7 @@ import { Event } from '../../../src/types.ts' import { postMergeData } from '../../testdata/geth-genesis/post-merge.ts' import type { ForkchoiceUpdate, NewPayload } from '../../../src/rpc/modules/engine/index.ts' +import { getLogger } from '../../logging.spec.ts' const payload: NewPayload = { payload: { @@ -87,9 +88,9 @@ describe('postmerge hardfork', () => { }) describe('Status updates', async () => { - const config = new Config() + const config = new Config({ logger: getLogger({ logLevel: 'info' }) }) const manager = new CLConnectionManager({ config }) - config.logger.on('data', (chunk) => { + config.logger?.logger.on('data', (chunk: any) => { it('received status message', () => { if ((chunk.message as string).includes('consensus forkchoice update head=0x67b9')) { assert.isTrue(true, 'received last fork choice message') @@ -97,7 +98,7 @@ describe('Status updates', async () => { if ((chunk.message as string).includes('consensus payload received number=55504')) { assert.isTrue(true, 'received last payload message') manager.stop() - config.logger.removeAllListeners() + config.logger?.logger.removeAllListeners() } }) }) @@ -116,11 +117,11 @@ describe('updates stats when a new block is processed', () => { number: payload.payload.blockNumber, }, }) - config.logger.on('data', (chunk) => { + config.logger?.logger.on('data', (chunk: any) => { if ((chunk.message as string).includes('Payload stats blocks count=1')) { assert.isTrue(true, 'received last payload stats message') manager.stop() - config.logger.removeAllListeners() + config.logger?.logger.removeAllListeners() } }) diff --git a/packages/client/test/rpc/engine/newPayloadV1.spec.ts b/packages/client/test/rpc/engine/newPayloadV1.spec.ts index 5f4d943a7c7..91c1c119e80 100644 --- a/packages/client/test/rpc/engine/newPayloadV1.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV1.spec.ts @@ -115,7 +115,7 @@ describe(method, () => { it('call with valid data but invalid transactions', async () => { const { chain, server } = await setupChain(postMergeData, 'post-merge', { engine: true }) const rpc = getRPCClient(server) - chain.config.logger.silent = true + chain.config.logger!.logger.silent = true const blockDataWithInvalidTransaction = { ...blockData, transactions: ['0x1'], @@ -136,7 +136,7 @@ describe(method, () => { engine: true, }) const rpc = getRPCClient(server) - chain.config.logger.silent = true + chain.config.logger!.logger.silent = true // Let's mock a non-signed transaction so execution fails const tx = createFeeMarket1559Tx( diff --git a/packages/client/test/rpc/engine/newPayloadV2.spec.ts b/packages/client/test/rpc/engine/newPayloadV2.spec.ts index 86d8fe353dd..87f18f82899 100644 --- a/packages/client/test/rpc/engine/newPayloadV2.spec.ts +++ b/packages/client/test/rpc/engine/newPayloadV2.spec.ts @@ -114,7 +114,7 @@ describe(`${method}: call with executionPayloadV1`, () => { it('call 
with valid data but invalid transactions', async () => { const { chain, server } = await setupChain(postMergeData, 'post-merge', { engine: true }) const rpc = getRPCClient(server) - chain.config.logger.silent = true + chain.config.logger!.logger.silent = true const blockDataWithInvalidTransaction = { ...blockData, transactions: ['0x1'], @@ -134,7 +134,7 @@ describe(`${method}: call with executionPayloadV1`, () => { engine: true, }) const rpc = getRPCClient(server) - chain.config.logger.silent = true + chain.config.logger!.logger.silent = true // Let's mock a non-signed transaction so execution fails const tx = createFeeMarket1559Tx( diff --git a/packages/client/test/rpc/helpers.ts b/packages/client/test/rpc/helpers.ts index 7c62351ab8f..d6ab87d00a2 100644 --- a/packages/client/test/rpc/helpers.ts +++ b/packages/client/test/rpc/helpers.ts @@ -24,7 +24,6 @@ import { assert } from 'vitest' import { Chain } from '../../src/blockchain/chain.ts' import { Config } from '../../src/config.ts' import { VMExecution } from '../../src/execution/index.ts' -import { getLogger } from '../../src/logging.ts' import { RlpxServer } from '../../src/net/server/rlpxserver.ts' import { RPCManager as Manager } from '../../src/rpc/index.ts' import { Skeleton } from '../../src/service/skeleton.ts' @@ -41,6 +40,7 @@ import type { GenesisState } from '@ethereumjs/util' import type { IncomingMessage } from 'connect' import type { HttpClient, HttpServer } from 'jayson/promise/index.js' import type { EthereumClient } from '../../src/client.ts' +import { getLogger } from '../logging.spec.ts' const config: any = {} config.logger = getLogger(config) diff --git a/packages/client/test/sim/beaconsync.spec.ts b/packages/client/test/sim/beaconsync.spec.ts index 5a8663f09fc..0103d3c6ae1 100644 --- a/packages/client/test/sim/beaconsync.spec.ts +++ b/packages/client/test/sim/beaconsync.spec.ts @@ -5,7 +5,6 @@ import { Client } from 'jayson/promise/index.js' import { assert, describe, it } from 'vitest' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { Event } from '../../src/types.ts' import { createInlineClient } from '../../src/util/index.ts' @@ -21,6 +20,7 @@ import { import type { PrefixedHexString } from '@ethereumjs/util' import type { EthereumClient } from '../../src/client.ts' import type { RlpxServer } from '../../src/net/server/index.ts' +import { getLogger } from '../logging.spec.ts' const client = Client.http({ port: 8545 }) diff --git a/packages/client/test/sim/snapsync.spec.ts b/packages/client/test/sim/snapsync.spec.ts index a12e032a611..7b59c4511ac 100644 --- a/packages/client/test/sim/snapsync.spec.ts +++ b/packages/client/test/sim/snapsync.spec.ts @@ -11,7 +11,6 @@ import { Client } from 'jayson/promise/index.js' import { assert, describe, it } from 'vitest' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { Event } from '../../src/types.ts' import { createInlineClient } from '../../src/util/index.ts' @@ -27,6 +26,7 @@ import { import type { MerkleStateManager } from '@ethereumjs/statemanager' import type { PrefixedHexString } from '@ethereumjs/util' import type { EthereumClient } from '../../src/client.ts' +import { getLogger } from '../logging.spec.ts' const client = Client.http({ port: 8545 }) diff --git a/packages/client/test/sync/beaconsync.spec.ts b/packages/client/test/sync/beaconsync.spec.ts index 6f13c3be9a8..a8516d5b0fc 100644 --- a/packages/client/test/sync/beaconsync.spec.ts +++ 
b/packages/client/test/sync/beaconsync.spec.ts @@ -7,6 +7,7 @@ import { Chain } from '../../src/blockchain/index.ts' import { Config } from '../../src/config.ts' import { ReverseBlockFetcher } from '../../src/sync/fetcher/reverseblockfetcher.ts' import { Skeleton } from '../../src/sync/index.ts' +import { getLogger } from '../logging.spec.ts' describe('[BeaconSynchronizer]', async () => { const execution: any = { run: () => {} } @@ -131,6 +132,7 @@ describe('[BeaconSynchronizer]', async () => { skeletonSubchainMergeMinimum: 0, accountCache: 10000, storageCache: 1000, + logger: getLogger({ logLevel: 'info' }), }) const pool = new PeerPool() as any const chain = await Chain.create({ config }) @@ -159,30 +161,30 @@ describe('[BeaconSynchronizer]', async () => { /// @ts-expect-error -- Assigning simpler config for testing blocks: { height: BigInt(0) }, } - sync.config.logger.addListener('data', (data: any) => { + sync.config.logger?.logger.addListener('data', (data: any) => { if ((data.message as string).includes('first=5 count=5')) assert.isTrue(true, 'should sync block 5 and target chain start') }) await sync.sync() - sync.config.logger.removeAllListeners() - sync.config.logger.addListener('data', (data: any) => { + sync.config.logger?.logger.removeAllListeners() + sync.config.logger?.logger.addListener('data', (data: any) => { if ((data.message as string).includes('first=1 count=1')) assert.isTrue(true, 'should sync block 1 and target chain start') }) /// @ts-expect-error -- Assigning simpler config for testing skeleton.status.progress.subchains = [{ head: BigInt(10), tail: BigInt(2) }] await sync.sync() - sync.config.logger.removeAllListeners() + sync.config.logger?.logger.removeAllListeners() /// @ts-expect-error -- Assigning simpler config for testing skeleton.status.progress.subchains = [{ head: BigInt(10), tail: BigInt(6) }] /// @ts-expect-error -- Assigning simpler config for testing sync['chain'] = { blocks: { height: BigInt(4) } } - sync.config.logger.addListener('data', (data: any) => { + sync.config.logger?.logger.addListener('data', (data: any) => { if ((data.message as string).includes('first=5 count=1')) assert.isTrue(true, 'should sync block 5 with count 1') }) await sync.sync() - sync.config.logger.removeAllListeners() + sync.config.logger?.logger.removeAllListeners() }) it('should not sync pre-genesis', async () => { @@ -215,12 +217,12 @@ describe('[BeaconSynchronizer]', async () => { /// @ts-expect-error -- Assigning simpler config for testing blocks: { height: BigInt(100) }, } - sync.config.logger.addListener('data', (data: any) => { + sync.config.logger?.logger.addListener('data', (data: any) => { if ((data.message as string).includes('first=5 count=5')) assert.isTrue(true, 'should sync block 5 and target chain start') }) await sync.sync() - sync.config.logger.removeAllListeners() + sync.config.logger?.logger.removeAllListeners() }) it('should extend and set with a valid head', async () => { diff --git a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts index 03e2c454c94..720245ca6cb 100644 --- a/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts +++ b/packages/client/test/sync/fetcher/reverseblockfetcher.spec.ts @@ -4,10 +4,10 @@ import { assert, describe, it, vi } from 'vitest' import { Chain } from '../../../src/blockchain/chain.ts' import { Config } from '../../../src/config.ts' -import { getLogger } from '../../../src/logging.ts' import { Skeleton } from 
'../../../src/service/skeleton.ts' import { Event } from '../../../src/types.ts' import { wait } from '../../integration/util.ts' +import { getLogger } from '../../logging.spec.ts' class PeerPool { idle() {} diff --git a/packages/client/test/sync/skeleton.spec.ts b/packages/client/test/sync/skeleton.spec.ts index b1c858b41e9..b5f147f2735 100644 --- a/packages/client/test/sync/skeleton.spec.ts +++ b/packages/client/test/sync/skeleton.spec.ts @@ -11,7 +11,6 @@ import { assert, describe, it } from 'vitest' import { Chain } from '../../src/blockchain/index.ts' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { Skeleton, errReorgDenied, errSyncMerged } from '../../src/sync/index.ts' import { short } from '../../src/util/index.ts' import { wait } from '../integration/util.ts' @@ -19,6 +18,7 @@ import { mergeTestnetData } from '../testdata/common/mergeTestnet.ts' import { postMergeData } from '../testdata/geth-genesis/post-merge.ts' import type { Block } from '@ethereumjs/block' +import { getLogger } from '../logging.spec.ts' type Subchain = { head: bigint tail: bigint diff --git a/packages/client/test/sync/txpool.spec.ts b/packages/client/test/sync/txpool.spec.ts index 10d8759a89b..5b0f0b34e76 100644 --- a/packages/client/test/sync/txpool.spec.ts +++ b/packages/client/test/sync/txpool.spec.ts @@ -18,11 +18,11 @@ import * as promClient from 'prom-client' import { assert, describe, it } from 'vitest' import { Config } from '../../src/config.ts' -import { getLogger } from '../../src/logging.ts' import { PeerPool } from '../../src/net/peerpool.ts' import { TxPool } from '../../src/service/txpool.ts' import type { PrometheusMetrics } from '../../src/types.ts' +import { getLogger } from '../logging.spec.ts' let prometheusMetrics: PrometheusMetrics | undefined