diff --git a/.changeset/@graphql-hive_gateway-1030-dependencies.md b/.changeset/@graphql-hive_gateway-1030-dependencies.md new file mode 100644 index 000000000..fc6e6bbec --- /dev/null +++ b/.changeset/@graphql-hive_gateway-1030-dependencies.md @@ -0,0 +1,7 @@ +--- +'@graphql-hive/gateway': patch +--- + +dependencies updates: + +- Added dependency [`@graphql-hive/logger@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger/v/workspace:^) (to `dependencies`) diff --git a/.changeset/@graphql-hive_gateway-runtime-1030-dependencies.md b/.changeset/@graphql-hive_gateway-runtime-1030-dependencies.md new file mode 100644 index 000000000..102b3e27b --- /dev/null +++ b/.changeset/@graphql-hive_gateway-runtime-1030-dependencies.md @@ -0,0 +1,9 @@ +--- +'@graphql-hive/gateway-runtime': patch +--- + +dependencies updates: + +- Added dependency [`@envelop/instrumentation@^1.0.0` ↗︎](https://www.npmjs.com/package/@envelop/instrumentation/v/1.0.0) (to `dependencies`) +- Added dependency [`@graphql-hive/logger@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger/v/workspace:^) (to `dependencies`) +- Removed dependency [`@graphql-hive/logger-json@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger-json/v/workspace:^) (from `dependencies`) diff --git a/.changeset/@graphql-mesh_fusion-runtime-1030-dependencies.md b/.changeset/@graphql-mesh_fusion-runtime-1030-dependencies.md new file mode 100644 index 000000000..d381bdd14 --- /dev/null +++ b/.changeset/@graphql-mesh_fusion-runtime-1030-dependencies.md @@ -0,0 +1,7 @@ +--- +'@graphql-mesh/fusion-runtime': patch +--- + +dependencies updates: + +- Added dependency [`@graphql-hive/logger@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger/v/workspace:^) (to `dependencies`) diff --git a/.changeset/@graphql-mesh_plugin-prometheus-1030-dependencies.md b/.changeset/@graphql-mesh_plugin-prometheus-1030-dependencies.md new file mode 100644 index 000000000..02e9f8bf5 --- /dev/null +++ 
b/.changeset/@graphql-mesh_plugin-prometheus-1030-dependencies.md @@ -0,0 +1,7 @@ +--- +'@graphql-mesh/plugin-prometheus': patch +--- + +dependencies updates: + +- Added dependency [`@graphql-hive/logger@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger/v/workspace:^) (to `dependencies`) diff --git a/.changeset/@graphql-mesh_transport-common-1030-dependencies.md b/.changeset/@graphql-mesh_transport-common-1030-dependencies.md new file mode 100644 index 000000000..2096b7a2b --- /dev/null +++ b/.changeset/@graphql-mesh_transport-common-1030-dependencies.md @@ -0,0 +1,7 @@ +--- +'@graphql-mesh/transport-common': patch +--- + +dependencies updates: + +- Added dependency [`@graphql-hive/logger@workspace:^` ↗︎](https://www.npmjs.com/package/@graphql-hive/logger/v/workspace:^) (to `dependencies`) diff --git a/.changeset/itchy-ways-cross.md b/.changeset/itchy-ways-cross.md new file mode 100644 index 000000000..331529560 --- /dev/null +++ b/.changeset/itchy-ways-cross.md @@ -0,0 +1,19 @@ +--- +'@graphql-mesh/hmac-upstream-signature': major +'@graphql-mesh/transport-http-callback': major +'@graphql-mesh/plugin-opentelemetry': major +'@graphql-mesh/plugin-prometheus': major +'@graphql-mesh/transport-common': major +'@graphql-mesh/plugin-jwt-auth': major +'@graphql-mesh/fusion-runtime': major +'@graphql-mesh/transport-ws': major +'@graphql-hive/gateway': major +'@graphql-hive/gateway-runtime': major +'@graphql-hive/nestjs': major +--- + +Introduce and use the new Hive Logger + +- [Read more about it on the Hive Logger documentation here.](https://the-guild.dev/graphql/hive/docs/logger) + +- If coming from Hive Gateway v1, [read the migration guide here.](https://the-guild.dev/graphql/hive/docs/migration-guides/gateway-v1-v2) diff --git a/.changeset/tough-elephants-shop.md b/.changeset/tough-elephants-shop.md new file mode 100644 index 000000000..2c628b131 --- /dev/null +++ b/.changeset/tough-elephants-shop.md @@ -0,0 +1,7 @@ +--- +'@graphql-hive/logger': 
major +--- + +Introducing Hive Logger + +[Read more about it on the Hive Logger documentation website.](https://the-guild.dev/graphql/hive/docs/logger) diff --git a/babel.config.cjs b/babel.config.cjs index b5586a0c4..3bb0a78f3 100644 --- a/babel.config.cjs +++ b/babel.config.cjs @@ -11,5 +11,6 @@ module.exports = { ['@babel/plugin-proposal-decorators', { version: '2023-11' }], '@babel/plugin-transform-class-properties', '@babel/plugin-proposal-explicit-resource-management', + '@babel/plugin-transform-private-methods', ], }; diff --git a/e2e/config-syntax-error/config-syntax-error.e2e.ts b/e2e/config-syntax-error/config-syntax-error.e2e.ts index d168008eb..433d0cb74 100644 --- a/e2e/config-syntax-error/config-syntax-error.e2e.ts +++ b/e2e/config-syntax-error/config-syntax-error.e2e.ts @@ -31,8 +31,8 @@ it.skipIf( }), ).rejects.toThrowError( gatewayRunner === 'bun' || gatewayRunner === 'bun-docker' - ? /error: Expected "{" but found "hello"(.|\n)*\/custom-resolvers.ts:8:11/ - : /SyntaxError \[Error\]: Error transforming .*(\/|\\)custom-resolvers.ts: Unexpected token, expected "{" \(8:11\)/, + ? 
/Expected \\"{\\" but found \\"hello\\"(.|\n)*\/custom-resolvers.ts/ + : /Error transforming .*(\/|\\)custom-resolvers.ts: Unexpected token, expected \\"{\\" \(8:11\)/, ); }, ); diff --git a/e2e/graphos-polling/services/gateway-fastify.ts b/e2e/graphos-polling/services/gateway-fastify.ts index 2e038bab7..77dc1b9b7 100644 --- a/e2e/graphos-polling/services/gateway-fastify.ts +++ b/e2e/graphos-polling/services/gateway-fastify.ts @@ -1,5 +1,5 @@ -import { createGatewayRuntime } from '@graphql-hive/gateway-runtime'; -import { createLoggerFromPino } from '@graphql-hive/logger-pino'; +import { createGatewayRuntime, Logger } from '@graphql-hive/gateway-runtime'; +import { PinoLogWriter } from '@graphql-hive/logger/writers/pino'; import { createOtlpHttpExporter, useOpenTelemetry, @@ -43,8 +43,7 @@ export interface FastifyContext { } const gw = createGatewayRuntime({ - // Integrate Fastify's logger / Pino with the gateway logger - logging: createLoggerFromPino(app.log), + logging: new Logger({ writers: [new PinoLogWriter(app.log)] }), // Align with Fastify requestId: { // Use the same header name as Fastify diff --git a/e2e/hmac-auth-https/package.json b/e2e/hmac-auth-https/package.json index 66efaaf52..5a0c5d49d 100644 --- a/e2e/hmac-auth-https/package.json +++ b/e2e/hmac-auth-https/package.json @@ -9,6 +9,7 @@ "@apollo/server": "^4.12.2", "@apollo/subgraph": "^2.11.0", "@graphql-hive/gateway": "workspace:^", + "@graphql-hive/logger": "workspace:^", "@graphql-mesh/compose-cli": "^1.4.1", "@graphql-mesh/hmac-upstream-signature": "workspace:^", "@graphql-mesh/plugin-jwt-auth": "workspace:^", diff --git a/e2e/hmac-auth-https/services/users/index.ts b/e2e/hmac-auth-https/services/users/index.ts index c77a92e52..a3d375c0c 100644 --- a/e2e/hmac-auth-https/services/users/index.ts +++ b/e2e/hmac-auth-https/services/users/index.ts @@ -2,6 +2,7 @@ import { readFileSync } from 'fs'; import { createServer } from 'https'; import { join } from 'path'; import { buildSubgraphSchema } from 
'@apollo/subgraph'; +import { Logger } from '@graphql-hive/logger'; import { useHmacSignatureValidation } from '@graphql-mesh/hmac-upstream-signature'; import { JWTExtendContextFields, @@ -20,6 +21,7 @@ const yoga = createYoga({ logging: true, plugins: [ useHmacSignatureValidation({ + log: new Logger({ level: 'debug' }), secret: 'HMAC_SIGNING_SECRET', }), useForwardedJWT({}), diff --git a/e2e/retry-timeout/gateway.config.ts b/e2e/retry-timeout/gateway.config.ts index 0d4e508f8..a929fb4ba 100644 --- a/e2e/retry-timeout/gateway.config.ts +++ b/e2e/retry-timeout/gateway.config.ts @@ -6,13 +6,13 @@ export const gatewayConfig = defineConfig({ maxRetries: 4, }, upstreamTimeout: 300, - plugins(ctx) { + plugins() { return [ ...(process.env['DEDUPLICATE_REQUEST'] ? [useDeduplicateRequest()] : []), { - onFetch() { + onFetch({ context }) { i++; - ctx.logger.info(`[FETCHING] #${i}`); + context.log.info(`[FETCHING] #${i}`); }, }, ]; diff --git a/e2e/supergraph-file-watcher/supergraph-file-watcher.e2e.ts b/e2e/supergraph-file-watcher/supergraph-file-watcher.e2e.ts index 5e1fba2f1..38634cfd1 100644 --- a/e2e/supergraph-file-watcher/supergraph-file-watcher.e2e.ts +++ b/e2e/supergraph-file-watcher/supergraph-file-watcher.e2e.ts @@ -45,7 +45,7 @@ it('should detect supergraph file change and reload schema', async () => { for (;;) { timeout.throwIfAborted(); await setTimeout(100); - if (gw.getStd('both').match(/invalidating supergraph/i)) { + if (gw.getStd('both').match(/supergraph changed/i)) { break; } } diff --git a/examples/hmac-auth-https/example.tar.gz b/examples/hmac-auth-https/example.tar.gz index 3a6dfb2c3..820f5b5df 100644 Binary files a/examples/hmac-auth-https/example.tar.gz and b/examples/hmac-auth-https/example.tar.gz differ diff --git a/examples/hmac-auth-https/package-lock.json b/examples/hmac-auth-https/package-lock.json index 69424220c..c373a1103 100644 --- a/examples/hmac-auth-https/package-lock.json +++ b/examples/hmac-auth-https/package-lock.json @@ -11,6 +11,7 @@ 
"@apollo/server": "^4.12.2", "@apollo/subgraph": "^2.11.0", "@graphql-hive/gateway": "^1.15.0", + "@graphql-hive/logger": "^0.0.0", "@graphql-mesh/compose-cli": "^1.4.1", "@graphql-mesh/hmac-upstream-signature": "^1.2.27", "@graphql-mesh/plugin-jwt-auth": "^1.5.5", @@ -2103,6 +2104,15 @@ "node": ">=18.0.0" } }, + "node_modules/@graphql-hive/logger": { + "version": "0.0.0", + "resolved": "https://registry.npmjs.org/@graphql-hive/logger/-/logger-0.0.0.tgz", + "integrity": "sha512-OYT33fYvSvPcbqfjZ4uniKQe+525oAuk2k3MKVwIBMjA7oFIjgCoUH0AlKyRjbXuFlmKDeidn+3bzY3S5nNqFw==", + "license": "MIT", + "engines": { + "node": ">=18.0.0" + } + }, "node_modules/@graphql-hive/logger-json": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/@graphql-hive/logger-json/-/logger-json-0.0.4.tgz", diff --git a/examples/hmac-auth-https/package.json b/examples/hmac-auth-https/package.json index 379991fe4..aed850bb3 100644 --- a/examples/hmac-auth-https/package.json +++ b/examples/hmac-auth-https/package.json @@ -13,6 +13,7 @@ "@apollo/server": "^4.12.2", "@apollo/subgraph": "^2.11.0", "@graphql-hive/gateway": "^1.15.0", + "@graphql-hive/logger": "^0.0.0", "@graphql-mesh/compose-cli": "^1.4.1", "@graphql-mesh/hmac-upstream-signature": "^1.2.27", "@graphql-mesh/plugin-jwt-auth": "^1.5.5", diff --git a/examples/hmac-auth-https/services/users/index.ts b/examples/hmac-auth-https/services/users/index.ts index 22429fd51..c03522fb5 100644 --- a/examples/hmac-auth-https/services/users/index.ts +++ b/examples/hmac-auth-https/services/users/index.ts @@ -2,6 +2,7 @@ import { readFileSync } from 'fs'; import { createServer } from 'https'; import { join } from 'path'; import { buildSubgraphSchema } from '@apollo/subgraph'; +import { Logger } from '@graphql-hive/logger'; import { useHmacSignatureValidation } from '@graphql-mesh/hmac-upstream-signature'; import { JWTExtendContextFields, @@ -19,6 +20,7 @@ const yoga = createYoga({ logging: true, plugins: [ useHmacSignatureValidation({ + log: new 
Logger({ level: 'debug' }), secret: 'HMAC_SIGNING_SECRET', }), useForwardedJWT({}), diff --git a/package.json b/package.json index d888a29e1..182ddd3e6 100644 --- a/package.json +++ b/package.json @@ -32,6 +32,7 @@ "@babel/plugin-proposal-explicit-resource-management": "7.27.4", "@babel/plugin-transform-class-properties": "7.27.1", "@babel/plugin-transform-class-static-block": "7.27.1", + "@babel/plugin-transform-private-methods": "^7.27.1", "@babel/preset-env": "7.27.2", "@babel/preset-typescript": "7.27.1", "@changesets/changelog-github": "^0.5.0", diff --git a/packages/delegate/src/finalizeGatewayRequest.ts b/packages/delegate/src/finalizeGatewayRequest.ts index 661a35b34..413a3a175 100644 --- a/packages/delegate/src/finalizeGatewayRequest.ts +++ b/packages/delegate/src/finalizeGatewayRequest.ts @@ -562,9 +562,6 @@ function finalizeSelectionSet( leave: (node) => { const type = typeInfo.getType(); if (type == null) { - // console.warn( - // `Invalid type for node: ${typeInfo.getParentType()?.name}.${node.name.value}`, - // ); return null; } const namedType = getNamedType(type); diff --git a/packages/fusion-runtime/package.json b/packages/fusion-runtime/package.json index cb4d31c24..ac2af2a18 100644 --- a/packages/fusion-runtime/package.json +++ b/packages/fusion-runtime/package.json @@ -45,6 +45,7 @@ "dependencies": { "@envelop/core": "^5.2.3", "@envelop/instrumentation": "^1.0.0", + "@graphql-hive/logger": "workspace:^", "@graphql-mesh/cross-helpers": "^0.4.10", "@graphql-mesh/transport-common": "workspace:^", "@graphql-mesh/types": "^0.104.0", diff --git a/packages/fusion-runtime/src/executor.ts b/packages/fusion-runtime/src/executor.ts index 2746160de..094aca8a4 100644 --- a/packages/fusion-runtime/src/executor.ts +++ b/packages/fusion-runtime/src/executor.ts @@ -34,7 +34,7 @@ export function getExecutorForUnifiedGraph( () => unifiedGraphManager.getContext(execReq.context), (context) => { function handleExecutor(executor: Executor) { - 
opts?.transportContext?.logger?.debug( + opts?.transportContext?.log.debug( 'Executing request on unified graph', () => print(execReq.document), ); @@ -50,7 +50,7 @@ export function getExecutorForUnifiedGraph( return handleMaybePromise( () => unifiedGraphManager.getUnifiedGraph(), (unifiedGraph) => { - opts?.transportContext?.logger?.debug( + opts?.transportContext?.log.debug( 'Executing request on unified graph', () => print(execReq.document), ); @@ -70,9 +70,7 @@ export function getExecutorForUnifiedGraph( enumerable: true, get() { return function unifiedGraphExecutorDispose() { - opts?.transportContext?.logger?.debug( - 'Disposing unified graph executor', - ); + opts?.transportContext?.log.debug('Disposing unified graph executor'); return unifiedGraphManager[DisposableSymbols.asyncDispose](); }; }, diff --git a/packages/fusion-runtime/src/federation/supergraph.ts b/packages/fusion-runtime/src/federation/supergraph.ts index 7589136dd..ec33091e9 100644 --- a/packages/fusion-runtime/src/federation/supergraph.ts +++ b/packages/fusion-runtime/src/federation/supergraph.ts @@ -1,7 +1,7 @@ +import { LegacyLogger, Logger } from '@graphql-hive/logger'; import type { YamlConfig } from '@graphql-mesh/types'; import { getInContextSDK, - requestIdByRequest, resolveAdditionalResolversWithoutImport, } from '@graphql-mesh/utils'; import type { @@ -158,7 +158,8 @@ export const handleFederationSupergraph: UnifiedGraphHandler = function ({ onDelegateHooks, additionalTypeDefs: additionalTypeDefsFromConfig = [], additionalResolvers: additionalResolversFromConfig = [], - logger, + // no logger was provided, use a muted logger for consistency across plugin hooks + log: rootLog = new Logger({ level: false }), }: UnifiedGraphHandlerOpts): UnifiedGraphHandlerResult { const additionalTypeDefs = [...asArray(additionalTypeDefsFromConfig)]; const additionalResolvers = [...asArray(additionalResolversFromConfig)]; @@ -230,7 +231,7 @@ export const handleFederationSupergraph: UnifiedGraphHandler 
= function ({ originalResolver, typeName, onDelegationStageExecuteHooks, - logger, + rootLog, ); } } @@ -278,7 +279,7 @@ export const handleFederationSupergraph: UnifiedGraphHandler = function ({ executableUnifiedGraph, // @ts-expect-error Legacy Mesh RawSource is not compatible with new Mesh subschemas, - logger, + LegacyLogger.from(rootLog), onDelegateHooks || [], ); const stitchingInfo = executableUnifiedGraph.extensions?.[ @@ -306,16 +307,9 @@ export const handleFederationSupergraph: UnifiedGraphHandler = function ({ delegationPlanBuilder = newDelegationPlanBuilder; } const onDelegationPlanDoneHooks: OnDelegationPlanDoneHook[] = []; - let currentLogger = logger; - let requestId: string | undefined; - if (context?.request) { - requestId = requestIdByRequest.get(context.request); - if (requestId) { - currentLogger = currentLogger?.child({ requestId }); - } - } + let log = context.log as Logger; if (sourceSubschema.name) { - currentLogger = currentLogger?.child({ + log = log.child({ subgraph: sourceSubschema.name, }); } @@ -328,7 +322,7 @@ export const handleFederationSupergraph: UnifiedGraphHandler = function ({ variables, fragments, fieldNodes, - logger: currentLogger, + log, context, info, delegationPlanBuilder, diff --git a/packages/fusion-runtime/src/unifiedGraphManager.ts b/packages/fusion-runtime/src/unifiedGraphManager.ts index 26f03a5e7..7330a18ef 100644 --- a/packages/fusion-runtime/src/unifiedGraphManager.ts +++ b/packages/fusion-runtime/src/unifiedGraphManager.ts @@ -1,8 +1,9 @@ +import type { Logger } from '@graphql-hive/logger'; import type { TransportContext, TransportEntry, } from '@graphql-mesh/transport-common'; -import type { Logger, OnDelegateHook } from '@graphql-mesh/types'; +import type { OnDelegateHook } from '@graphql-mesh/types'; import { dispose, isDisposable } from '@graphql-mesh/utils'; import { CRITICAL_ERROR } from '@graphql-tools/executor'; import type { @@ -68,8 +69,7 @@ export interface UnifiedGraphHandlerOpts { 
onDelegationPlanHooks?: OnDelegationPlanHook[]; onDelegationStageExecuteHooks?: OnDelegationStageExecuteHook[]; onDelegateHooks?: OnDelegateHook[]; - - logger?: Logger; + log?: Logger; } export interface UnifiedGraphHandlerResult { @@ -81,7 +81,7 @@ export interface UnifiedGraphHandlerResult { export interface UnifiedGraphManagerOptions { getUnifiedGraph( - ctx: TransportContext, + ctx: TransportContext | undefined, ): MaybePromise; // Handle the unified graph by any specification handleUnifiedGraph?: UnifiedGraphHandler; @@ -105,7 +105,6 @@ export interface UnifiedGraphManagerOptions { */ batch?: boolean; instrumentation?: () => Instrumentation | undefined; - onUnifiedGraphChange?(newUnifiedGraph: GraphQLSchema): void; } @@ -157,7 +156,7 @@ export class UnifiedGraphManager implements AsyncDisposable { this.onDelegationStageExecuteHooks = opts?.onDelegationStageExecuteHooks || []; if (opts.pollingInterval != null) { - opts.transportContext?.logger?.debug( + opts.transportContext?.log.debug( `Starting polling to Supergraph with interval ${millisecondsToStr(opts.pollingInterval)}`, ); } @@ -170,16 +169,16 @@ export class UnifiedGraphManager implements AsyncDisposable { this.lastLoadTime != null && Date.now() - this.lastLoadTime >= this.opts.pollingInterval ) { - this.opts?.transportContext?.logger?.debug(`Polling Supergraph`); + this.opts?.transportContext?.log.debug(`Polling Supergraph`); this.polling$ = handleMaybePromise( () => this.getAndSetUnifiedGraph(), () => { this.polling$ = undefined; }, (err) => { - this.opts.transportContext?.logger?.error( - 'Failed to poll Supergraph', + this.opts.transportContext?.log.error( err, + 'Failed to poll Supergraph', ); this.polling$ = undefined; }, @@ -187,19 +186,21 @@ export class UnifiedGraphManager implements AsyncDisposable { } if (!this.unifiedGraph) { if (!this.initialUnifiedGraph$) { - this.opts?.transportContext?.logger?.debug( + this.opts?.transportContext?.log.debug( 'Fetching the initial Supergraph', ); if 
(this.opts.transportContext?.cache) { - this.opts.transportContext?.logger?.debug( - `Searching for Supergraph in cache under key "${UNIFIEDGRAPH_CACHE_KEY}"...`, + this.opts.transportContext?.log.debug( + { key: UNIFIEDGRAPH_CACHE_KEY }, + 'Searching for Supergraph in cache...', ); this.initialUnifiedGraph$ = handleMaybePromise( () => this.opts.transportContext?.cache?.get(UNIFIEDGRAPH_CACHE_KEY), (cachedUnifiedGraph) => { if (cachedUnifiedGraph) { - this.opts.transportContext?.logger?.debug( + this.opts.transportContext?.log.debug( + { key: UNIFIEDGRAPH_CACHE_KEY }, 'Found Supergraph in cache', ); return this.handleLoadedUnifiedGraph(cachedUnifiedGraph, true); @@ -217,7 +218,8 @@ export class UnifiedGraphManager implements AsyncDisposable { () => this.initialUnifiedGraph$!, (v) => { this.initialUnifiedGraph$ = undefined; - this.opts.transportContext?.logger?.debug( + this.opts.transportContext?.log.debug( + { key: UNIFIEDGRAPH_CACHE_KEY }, 'Initial Supergraph fetched', ); return v; @@ -240,7 +242,7 @@ export class UnifiedGraphManager implements AsyncDisposable { this.lastLoadedUnifiedGraph != null && compareSchemas(loadedUnifiedGraph, this.lastLoadedUnifiedGraph) ) { - this.opts.transportContext?.logger?.debug( + this.opts.transportContext?.log.debug( 'Supergraph has not been changed, skipping...', ); this.lastLoadTime = Date.now(); @@ -267,17 +269,18 @@ export class UnifiedGraphManager implements AsyncDisposable { // 60 seconds making sure the unifiedgraph is not kept forever // NOTE: we default to 60s because Cloudflare KV TTL does not accept anything less 60; - this.opts.transportContext.logger?.debug( - `Caching Supergraph with TTL ${ttl}s`, + this.opts.transportContext?.log.debug( + { ttl, key: UNIFIEDGRAPH_CACHE_KEY }, + 'Caching Supergraph', ); - const logCacheSetError = (e: unknown) => { - this.opts.transportContext?.logger?.debug( - `Unable to store Supergraph in cache under key "${UNIFIEDGRAPH_CACHE_KEY}" with TTL ${ttl}s`, - e, + const logCacheSetError 
= (err: unknown) => { + this.opts.transportContext?.log.debug( + { err, ttl, key: UNIFIEDGRAPH_CACHE_KEY }, + 'Unable to cache Supergraph', ); }; try { - const cacheSet$ = this.opts.transportContext.cache.set( + const cacheSet$ = this.opts.transportContext?.cache.set( UNIFIEDGRAPH_CACHE_KEY, serializedUnifiedGraph, { ttl }, @@ -289,10 +292,10 @@ export class UnifiedGraphManager implements AsyncDisposable { } catch (e) { logCacheSetError(e); } - } catch (e) { - this.opts.transportContext.logger?.error( + } catch (err: any) { + this.opts.transportContext?.log.error( + err, 'Failed to initiate caching of Supergraph', - e, ); } } @@ -318,7 +321,7 @@ export class UnifiedGraphManager implements AsyncDisposable { onDelegationPlanHooks: this.onDelegationPlanHooks, onDelegationStageExecuteHooks: this.onDelegationStageExecuteHooks, onDelegateHooks: this.opts.onDelegateHooks, - logger: this.opts.transportContext?.logger, + log: this.opts.transportContext?.log, }); const transportExecutorStack = new AsyncDisposableStack(); const onSubgraphExecute = getOnSubgraphExecute({ @@ -360,7 +363,7 @@ export class UnifiedGraphManager implements AsyncDisposable { }, }, ); - this.opts.transportContext?.logger?.debug( + this.opts.transportContext?.log.debug( 'Supergraph has been changed, updating...', ); } @@ -372,9 +375,9 @@ export class UnifiedGraphManager implements AsyncDisposable { }, (err) => { this.disposeReason = undefined; - this.opts.transportContext?.logger?.error( - 'Failed to dispose the existing transports and executors', + this.opts.transportContext?.log.error( err, + 'Failed to dispose the existing transports and executors', ); return this.unifiedGraph!; }, @@ -392,14 +395,11 @@ export class UnifiedGraphManager implements AsyncDisposable { private getAndSetUnifiedGraph(): MaybePromise { return handleMaybePromise( - () => this.opts.getUnifiedGraph(this.opts.transportContext || {}), + () => this.opts.getUnifiedGraph(this.opts.transportContext), (loadedUnifiedGraph: string | 
GraphQLSchema | DocumentNode) => this.handleLoadedUnifiedGraph(loadedUnifiedGraph), (err) => { - this.opts.transportContext?.logger?.error( - 'Failed to load Supergraph', - err, - ); + this.opts.transportContext?.log.error(err, 'Failed to load Supergraph'); this.lastLoadTime = Date.now(); this.disposeReason = undefined; this.polling$ = undefined; diff --git a/packages/fusion-runtime/src/utils.ts b/packages/fusion-runtime/src/utils.ts index c43645cae..73fc867ce 100644 --- a/packages/fusion-runtime/src/utils.ts +++ b/packages/fusion-runtime/src/utils.ts @@ -1,4 +1,6 @@ import { getInstrumented } from '@envelop/instrumentation'; +import { LegacyLogger, Logger } from '@graphql-hive/logger'; +import { loggerForRequest } from '@graphql-hive/logger/request'; import { defaultPrintFn, type Transport, @@ -7,13 +9,7 @@ import { type TransportGetSubgraphExecutor, type TransportGetSubgraphExecutorOptions, } from '@graphql-mesh/transport-common'; -import type { Logger } from '@graphql-mesh/types'; -import { - isDisposable, - iterateAsync, - loggerForExecutionRequest, - requestIdByRequest, -} from '@graphql-mesh/utils'; +import { isDisposable, iterateAsync } from '@graphql-mesh/utils'; import { getBatchingExecutor } from '@graphql-tools/batch-execute'; import { DelegationPlanBuilder, @@ -112,22 +108,15 @@ function getTransportExecutor({ transports = defaultTransportsGetter, getDisposeReason, }: { - transportContext: TransportContext; + transportContext: TransportContext | undefined; transportEntry: TransportEntry; subgraphName?: string; subgraph: GraphQLSchema; transports?: Transports; getDisposeReason?: () => GraphQLError | undefined; }): MaybePromise { - // TODO const kind = transportEntry?.kind || ''; - let logger = transportContext?.logger; - if (logger) { - if (subgraphName) { - logger = logger.child({ subgraph: subgraphName }); - } - logger?.debug(`Loading transport "${kind}"`); - } + transportContext?.log.debug(`Loading transport "${kind}"`); return handleMaybePromise( () 
=> typeof transports === 'function' ? transports(kind) : transports[kind], @@ -154,6 +143,11 @@ function getTransportExecutor({ `Transport "${kind}" "getSubgraphExecutor" is not a function`, ); } + const log = + transportContext?.log || + // if the logger is not provided by the context, create a new silent one just for consistency in the hooks + new Logger({ level: false }); + const logger = transportContext?.logger || LegacyLogger.from(log); return getSubgraphExecutor({ subgraphName, subgraph, @@ -170,6 +164,8 @@ function getTransportExecutor({ }, getDisposeReason, ...transportContext, + log, + logger, }); }, ); @@ -186,7 +182,7 @@ export const subgraphNameByExecutionRequest = new WeakMap< */ export function getOnSubgraphExecute({ onSubgraphExecuteHooks, - transportContext = {}, + transportContext, transportEntryMap, getSubgraphSchema, transportExecutorStack, @@ -214,18 +210,19 @@ export function getOnSubgraphExecute({ let executor: Executor | undefined = subgraphExecutorMap.get(subgraphName); // If the executor is not initialized yet, initialize it if (executor == null) { - let logger = transportContext?.logger; - if (logger) { - const requestId = requestIdByRequest.get( - executionRequest.context?.request, - ); - if (requestId) { - logger = logger.child({ requestId }); - } + if (transportContext) { + let log = executionRequest.context?.request + ? 
loggerForRequest( + transportContext.log, + executionRequest.context.request, + ) + : transportContext.log; if (subgraphName) { - logger = logger.child({ subgraph: subgraphName }); + log = log.child({ subgraph: subgraphName }); } - logger.debug(`Initializing executor`); + // overwrite the log in the transport context because now it contains more details + transportContext.log = log; + log.debug('Initializing executor'); } // Lazy executor that loads transport executor on demand executor = function lazyExecutor(subgraphExecReq: ExecutionRequest) { @@ -312,23 +309,14 @@ export function wrapExecutorWithHooks({ baseExecutionRequest.info = baseExecutionRequest.info || ({} as GraphQLResolveInfo); baseExecutionRequest.info.executionRequest = baseExecutionRequest; - // TODO: Fix this in onFetch hook handler of @graphql-mesh/utils + // this rootValue will be set in the info value for field.resolvers in non-graphql requests // TODO: Also consider if a subgraph can ever rely on the gateway's rootValue? 
baseExecutionRequest.rootValue = { executionRequest: baseExecutionRequest, }; - - const requestId = - baseExecutionRequest.context?.request && - requestIdByRequest.get(baseExecutionRequest.context.request); - let execReqLogger = transportContext?.logger; - if (execReqLogger) { - if (requestId) { - execReqLogger = execReqLogger.child({ requestId }); - } - loggerForExecutionRequest.set(baseExecutionRequest, execReqLogger); - } - execReqLogger = execReqLogger?.child?.({ subgraph: subgraphName }); + const log = + transportContext?.log.child({ subgraph: subgraphName }) || + new Logger({ attrs: { subgraph: subgraphName } }); if (onSubgraphExecuteHooks.length === 0) { return baseExecutor(baseExecutionRequest); } @@ -354,11 +342,10 @@ export function wrapExecutorWithHooks({ }, executor, setExecutor(newExecutor) { - execReqLogger?.debug('executor has been updated'); + log.debug('executor has been updated'); executor = newExecutor; }, - requestId, - logger: execReqLogger, + log: log, }), onSubgraphExecuteDoneHooks, ), @@ -378,10 +365,7 @@ export function wrapExecutorWithHooks({ onSubgraphExecuteDoneHook({ result: currentResult, setResult(newResult: ExecutionResult) { - execReqLogger?.debug( - 'overriding result with: ', - newResult, - ); + log.debug('overriding result with: ', newResult); currentResult = newResult; }, }), @@ -421,10 +405,7 @@ export function wrapExecutorWithHooks({ onNext({ result: currentResult, setResult: (res) => { - execReqLogger?.debug( - 'overriding result with: ', - res, - ); + log.debug('overriding result with: ', res); currentResult = res; }, @@ -473,8 +454,7 @@ export interface OnSubgraphExecutePayload { setExecutionRequest(executionRequest: ExecutionRequest): void; executor: Executor; setExecutor(executor: Executor): void; - requestId?: string; - logger?: Logger; + log: Logger; } export interface OnSubgraphExecuteDonePayload { @@ -515,8 +495,7 @@ export interface OnDelegationPlanHookPayload { fragments: Record; fieldNodes: SelectionNode[]; 
context: TContext; - requestId?: string; - logger?: Logger; + log: Logger; info?: GraphQLResolveInfo; delegationPlanBuilder: DelegationPlanBuilder; setDelegationPlanBuilder(delegationPlanBuilder: DelegationPlanBuilder): void; @@ -552,8 +531,7 @@ export interface OnDelegationStageExecutePayload { typeName: string; - requestId?: string; - logger?: Logger; + log: Logger; } export type OnDelegationStageExecuteDoneHook = ( @@ -600,19 +578,11 @@ export function wrapMergedTypeResolver>( originalResolver: MergedTypeResolver, typeName: string, onDelegationStageExecuteHooks: OnDelegationStageExecuteHook[], - baseLogger?: Logger, + log: Logger, ): MergedTypeResolver { return (object, context, info, subschema, selectionSet, key, type) => { - let logger = baseLogger; - let requestId: string | undefined; - if (logger && context['request']) { - requestId = requestIdByRequest.get(context['request']); - if (requestId) { - logger = logger.child({ requestId }); - } - } if (subschema.name) { - logger = logger?.child({ subgraph: subschema.name }); + log = log.child({ subgraph: subschema.name }); } let resolver = originalResolver as MergedTypeResolver; function setResolver(newResolver: MergedTypeResolver) { @@ -631,8 +601,7 @@ export function wrapMergedTypeResolver>( key, typeName, type, - requestId, - logger, + log, resolver, setResolver, }); diff --git a/packages/fusion-runtime/tests/polling.test.ts b/packages/fusion-runtime/tests/polling.test.ts index b8e9829ea..cb7f7ab43 100644 --- a/packages/fusion-runtime/tests/polling.test.ts +++ b/packages/fusion-runtime/tests/polling.test.ts @@ -1,4 +1,5 @@ import { setTimeout } from 'timers/promises'; +import { LegacyLogger, Logger } from '@graphql-hive/logger'; import { getUnifiedGraphGracefully } from '@graphql-mesh/fusion-composition'; import { getExecutorForUnifiedGraph } from '@graphql-mesh/fusion-runtime'; import { @@ -21,7 +22,6 @@ import { import { ExecutionResult, GraphQLSchema, parse } from 'graphql'; import { createSchema } from 
'graphql-yoga'; import { describe, expect, it, vi } from 'vitest'; -import { getDefaultLogger } from '../../runtime/src/getDefaultLogger'; import { UnifiedGraphManager } from '../src/unifiedGraphManager'; describe('Polling', () => { @@ -374,20 +374,18 @@ describe('Polling', () => { const unifiedGraphFetcher = vi.fn(() => { return graphDeferred ? graphDeferred.promise : unifiedGraph; }); - const logger = getDefaultLogger(); + const log = new Logger(); await using executor = getExecutorForUnifiedGraph({ getUnifiedGraph: unifiedGraphFetcher, pollingInterval: 10_000, - transportContext: { - logger, - }, + transportContext: { log, logger: LegacyLogger.from(log) }, transports() { - logger.debug('transports'); + log.debug('transports'); return { getSubgraphExecutor() { - logger.debug('getSubgraphExecutor'); + log.debug('getSubgraphExecutor'); return function dynamicExecutor(...args) { - logger.debug('dynamicExecutor'); + log.debug('dynamicExecutor'); return createDefaultExecutor(schema)(...args); }; }, diff --git a/packages/fusion-runtime/tests/runtime.test.ts b/packages/fusion-runtime/tests/runtime.test.ts index ab2171a66..0224582b2 100644 --- a/packages/fusion-runtime/tests/runtime.test.ts +++ b/packages/fusion-runtime/tests/runtime.test.ts @@ -1,4 +1,5 @@ import { buildSubgraphSchema } from '@apollo/subgraph'; +import { Logger } from '@graphql-hive/logger'; import { OnDelegationPlanDoneHook, OnDelegationPlanHook, @@ -264,7 +265,7 @@ describe('onDelegationPlanHook', () => { context, delegationPlanBuilder: expect.any(Function), setDelegationPlanBuilder: expect.any(Function), - logger: undefined, + log: expect.any(Logger), info: expect.any(Object), }); expect( diff --git a/packages/fusion-runtime/tests/utils.ts b/packages/fusion-runtime/tests/utils.ts index c723bf360..36eed7d76 100644 --- a/packages/fusion-runtime/tests/utils.ts +++ b/packages/fusion-runtime/tests/utils.ts @@ -1,3 +1,4 @@ +import { LegacyLogger, Logger } from '@graphql-hive/logger'; import { 
getUnifiedGraphGracefully, type SubgraphConfig, @@ -44,8 +45,13 @@ export function composeAndGetExecutor( subgraphs: SubgraphConfig[], opts?: Partial>, ) { + const log = new Logger({ level: false }); const manager = new UnifiedGraphManager({ getUnifiedGraph: () => getUnifiedGraphGracefully(subgraphs), + transportContext: { + log, + logger: LegacyLogger.from(log), + }, transports() { return { getSubgraphExecutor({ subgraphName }) { diff --git a/packages/gateway/package.json b/packages/gateway/package.json index 3d57aafec..c9fb573bc 100644 --- a/packages/gateway/package.json +++ b/packages/gateway/package.json @@ -55,6 +55,7 @@ "@escape.tech/graphql-armor-max-tokens": "^2.5.0", "@graphql-hive/gateway-runtime": "workspace:^", "@graphql-hive/importer": "workspace:^", + "@graphql-hive/logger": "workspace:^", "@graphql-hive/plugin-aws-sigv4": "workspace:^", "@graphql-hive/plugin-deduplicate-request": "workspace:^", "@graphql-hive/pubsub": "workspace:^", diff --git a/packages/gateway/src/bin.ts b/packages/gateway/src/bin.ts index 5fce1be5e..42edd5dc7 100644 --- a/packages/gateway/src/bin.ts +++ b/packages/gateway/src/bin.ts @@ -3,7 +3,7 @@ import 'dotenv/config'; // inject dotenv options to process.env import module from 'node:module'; import type { InitializeData } from '@graphql-hive/importer/hooks'; -import { getDefaultLogger } from '../../runtime/src/getDefaultLogger'; +import { Logger } from '@graphql-hive/logger'; import { enableModuleCachingIfPossible, handleNodeWarnings, run } from './cli'; // @inject-version globalThis.__VERSION__ here @@ -20,7 +20,7 @@ module.register('@graphql-hive/importer/hooks', { enableModuleCachingIfPossible(); handleNodeWarnings(); -const log = getDefaultLogger(); +const log = new Logger(); run({ log }).catch((err) => { log.error(err); diff --git a/packages/gateway/src/cli.ts b/packages/gateway/src/cli.ts index 45e8f1b88..2c9e4258b 100644 --- a/packages/gateway/src/cli.ts +++ b/packages/gateway/src/cli.ts @@ -15,16 +15,16 @@ import { type 
GatewayGraphOSReportingOptions, type GatewayHiveReportingOptions, } from '@graphql-hive/gateway-runtime'; +import { Logger } from '@graphql-hive/logger'; import type { AWSSignv4PluginOptions } from '@graphql-hive/plugin-aws-sigv4'; import { HivePubSub } from '@graphql-hive/pubsub'; import type UpstashRedisCache from '@graphql-mesh/cache-upstash-redis'; import type { JWTAuthPluginOptions } from '@graphql-mesh/plugin-jwt-auth'; import type { OpenTelemetryMeshPluginOptions } from '@graphql-mesh/plugin-opentelemetry'; import type { PrometheusPluginOptions } from '@graphql-mesh/plugin-prometheus'; -import type { KeyValueCache, Logger, YamlConfig } from '@graphql-mesh/types'; +import type { KeyValueCache, YamlConfig } from '@graphql-mesh/types'; import { renderGraphiQL } from '@graphql-yoga/render-graphiql'; import parseDuration from 'parse-duration'; -import { getDefaultLogger } from '../../runtime/src/getDefaultLogger'; import { addCommands } from './commands/index'; import { createDefaultConfigPaths } from './config'; import { getMaxConcurrency } from './getMaxConcurrency'; @@ -105,7 +105,7 @@ export interface GatewayCLIProxyConfig } export type KeyValueCacheFactoryFn = (ctx: { - logger: Logger; + log: Logger; pubsub: HivePubSub; cwd: string; }) => KeyValueCache; @@ -400,7 +400,7 @@ let cli = new Command() export async function run(userCtx: Partial) { const ctx: CLIContext = { - log: userCtx.log || getDefaultLogger(), + log: userCtx.log || new Logger(), productName: 'Hive Gateway', productDescription: 'Federated GraphQL Gateway', productPackageName: '@graphql-hive/gateway', diff --git a/packages/gateway/src/commands/handleFork.ts b/packages/gateway/src/commands/handleFork.ts index c093a90f8..a5c56091b 100644 --- a/packages/gateway/src/commands/handleFork.ts +++ b/packages/gateway/src/commands/handleFork.ts @@ -1,5 +1,5 @@ import cluster, { type Worker } from 'node:cluster'; -import type { Logger } from '@graphql-mesh/types'; +import type { Logger } from 
'@graphql-hive/logger'; import { registerTerminateHandler } from '@graphql-mesh/utils'; /** @@ -22,25 +22,23 @@ export function handleFork(log: Logger, config: { fork?: number }): boolean { logData['code'] = code; } if (expectedToExit) { - workerLogger.debug('exited', logData); + workerLogger.debug(logData, 'exited'); } else { workerLogger.error( - 'exited unexpectedly. A restart is recommended to ensure the stability of the service', logData, + 'Exited unexpectedly. A restart is recommended to ensure the stability of the service', ); } workers.delete(worker); if (!expectedToExit && workers.size === 0) { - log.error(`All workers exited unexpectedly. Exiting`, logData); + log.error(logData, 'All workers exited unexpectedly. Exiting...'); process.exit(1); } }); workers.add(worker); } registerTerminateHandler((signal) => { - log.info('Killing workers', { - signal, - }); + log.info(`Killing workers on ${signal}`); expectedToExit = true; workers.forEach((w) => { w.kill(signal); @@ -49,7 +47,11 @@ export function handleFork(log: Logger, config: { fork?: number }): boolean { return true; } } catch (e) { - log.error(`Error while forking workers: `, e); + log.error( + // @ts-expect-error very likely an instanceof error + e, + 'Error while forking workers', + ); } return false; } diff --git a/packages/gateway/src/commands/handleLoggingOption.ts b/packages/gateway/src/commands/handleLoggingOption.ts deleted file mode 100644 index 4b9c213b6..000000000 --- a/packages/gateway/src/commands/handleLoggingOption.ts +++ /dev/null @@ -1,18 +0,0 @@ -import { - handleLoggingConfig as handleLoggingConfigRuntime, - LogLevel, -} from '@graphql-hive/gateway-runtime'; -import { Logger } from '@graphql-mesh/types'; -import { CLIContext } from '..'; - -export function handleLoggingConfig( - loggingConfig: - | boolean - | Logger - | LogLevel - | keyof typeof LogLevel - | undefined, - ctx: CLIContext, -) { - ctx.log = handleLoggingConfigRuntime(loggingConfig, ctx.log); -} diff --git 
a/packages/gateway/src/commands/handleReportingConfig.ts b/packages/gateway/src/commands/handleReportingConfig.ts index f6bea3354..3434d5da3 100644 --- a/packages/gateway/src/commands/handleReportingConfig.ts +++ b/packages/gateway/src/commands/handleReportingConfig.ts @@ -37,28 +37,28 @@ export function handleReportingConfig( if (cliOpts.hiveRegistryToken && cliOpts.hiveUsageAccessToken) { ctx.log.error( - `Cannot use "--hive-registry-token" with "--hive-usage-access-token". Please use "--hive-usage-target" and "--hive-usage-access-token" or the config instead.`, + 'Cannot use "--hive-registry-token" with "--hive-usage-access-token". Please use "--hive-usage-target" and "--hive-usage-access-token" or the config instead.', ); process.exit(1); } if (cliOpts.hiveRegistryToken && opts.hiveUsageTarget) { ctx.log.error( - `Cannot use "--hive-registry-token" with a target. Please use "--hive-usage-target" and "--hive-usage-access-token" or the config instead.`, + 'Cannot use "--hive-registry-token" with a target. Please use "--hive-usage-target" and "--hive-usage-access-token" or the config instead.', ); process.exit(1); } if (opts.hiveUsageTarget && !opts.hiveUsageAccessToken) { ctx.log.error( - `Hive usage target needs an access token. Please provide it through the "--hive-usage-access-token " option or the config.`, + 'Hive usage target needs an access token. Please provide it through the "--hive-usage-access-token " option or the config.', ); process.exit(1); } if (opts.hiveUsageAccessToken && !opts.hiveUsageTarget) { ctx.log.error( - `Hive usage access token needs a target. Please provide it through the "--hive-usage-target " option or the config.`, + 'Hive usage access token needs a target. 
Please provide it through the "--hive-usage-target " option or the config.', ); process.exit(1); } @@ -68,9 +68,9 @@ export function handleReportingConfig( if (hiveUsageAccessToken) { // different logs w and w/o the target to disambiguate if (opts.hiveUsageTarget) { - ctx.log.info(`Configuring Hive usage reporting`); + ctx.log.info('Configuring Hive usage reporting'); } else { - ctx.log.info(`Configuring Hive registry reporting`); + ctx.log.info('Configuring Hive registry reporting'); } return { ...loadedConfig.reporting, @@ -81,7 +81,7 @@ export function handleReportingConfig( } if (opts.apolloKey) { - ctx.log.info(`Configuring Apollo GraphOS registry reporting`); + ctx.log.info('Configuring Apollo GraphOS registry reporting'); if (!opts.apolloGraphRef?.includes('@')) { ctx.log.error( `Apollo GraphOS requires a graph ref in the format @. Please provide a valid graph ref ${opts.apolloGraphRef ? `not ${opts.apolloGraphRef}` : ''}.`, diff --git a/packages/gateway/src/commands/proxy.ts b/packages/gateway/src/commands/proxy.ts index 14bf10688..781308617 100644 --- a/packages/gateway/src/commands/proxy.ts +++ b/packages/gateway/src/commands/proxy.ts @@ -1,6 +1,7 @@ import cluster from 'node:cluster'; import { createGatewayRuntime, + createLoggerFromLogging, type GatewayConfigProxy, } from '@graphql-hive/gateway-runtime'; import { PubSub } from '@graphql-hive/pubsub'; @@ -18,7 +19,6 @@ import { } from '../config'; import { startServerForRuntime } from '../servers/startServerForRuntime'; import { handleFork } from './handleFork'; -import { handleLoggingConfig } from './handleLoggingOption'; import { handleReportingConfig } from './handleReportingConfig'; export const addCommand: AddCommand = (ctx, cli) => @@ -44,6 +44,9 @@ export const addCommand: AddCommand = (ctx, cli) => hivePersistedDocumentsToken, ...opts } = this.optsWithGlobals(); + + ctx.log.info(`Starting ${ctx.productName} ${ctx.version} in proxy mode`); + const loadedConfig = await loadConfig({ log: ctx.log, 
configPath: opts.configPath, @@ -69,11 +72,10 @@ export const addCommand: AddCommand = (ctx, cli) => const hiveCdnEndpointOpt = // TODO: take schema from optsWithGlobals once https://github.com/commander-js/extra-typings/pull/76 is merged this.opts().schema || hiveCdnEndpoint; - const hiveCdnLogger = ctx.log.child({ source: 'Hive CDN' }); if (hiveCdnEndpointOpt) { if (hiveCdnKey) { if (!isUrl(hiveCdnEndpointOpt)) { - hiveCdnLogger.error( + ctx.log.error( 'Endpoint must be a URL when providing --hive-cdn-key but got ' + hiveCdnEndpointOpt, ); @@ -115,11 +117,11 @@ export const addCommand: AddCommand = (ctx, cli) => const pubsub = loadedConfig.pubsub || new PubSub(); const cwd = loadedConfig.cwd || process.cwd(); if (loadedConfig.logging != null) { - handleLoggingConfig(loadedConfig.logging, ctx); + ctx.log = createLoggerFromLogging(loadedConfig.logging); } const cache = await getCacheInstanceFromConfig(loadedConfig, { pubsub, - logger: ctx.log, + log: ctx.log, cwd, }); const builtinPlugins = await getBuiltinPluginsFromConfig( @@ -128,7 +130,7 @@ export const addCommand: AddCommand = (ctx, cli) => ...opts, }, { - logger: ctx.log, + log: ctx.log, cache, pubsub, cwd, diff --git a/packages/gateway/src/commands/subgraph.ts b/packages/gateway/src/commands/subgraph.ts index 64b5cdf1c..b76b82e50 100644 --- a/packages/gateway/src/commands/subgraph.ts +++ b/packages/gateway/src/commands/subgraph.ts @@ -3,6 +3,7 @@ import { lstat } from 'node:fs/promises'; import { isAbsolute, resolve } from 'node:path'; import { createGatewayRuntime, + createLoggerFromLogging, type GatewayConfigSubgraph, type UnifiedGraphConfig, } from '@graphql-hive/gateway-runtime'; @@ -22,7 +23,6 @@ import { } from '../config'; import { startServerForRuntime } from '../servers/startServerForRuntime'; import { handleFork } from './handleFork'; -import { handleLoggingConfig } from './handleLoggingOption'; import { handleReportingConfig } from './handleReportingConfig'; export const addCommand: AddCommand = 
(ctx, cli) => @@ -45,6 +45,9 @@ export const addCommand: AddCommand = (ctx, cli) => hivePersistedDocumentsToken, ...opts } = this.optsWithGlobals(); + + ctx.log.info(`Starting ${ctx.productName} ${ctx.version} as subgraph`); + const loadedConfig = await loadConfig({ log: ctx.log, configPath: opts.configPath, @@ -75,11 +78,11 @@ export const addCommand: AddCommand = (ctx, cli) => const pubsub = loadedConfig.pubsub || new PubSub(); const cwd = loadedConfig.cwd || process.cwd(); if (loadedConfig.logging != null) { - handleLoggingConfig(loadedConfig.logging, ctx); + ctx.log = createLoggerFromLogging(loadedConfig.logging); } const cache = await getCacheInstanceFromConfig(loadedConfig, { pubsub, - logger: ctx.log, + log: ctx.log, cwd, }); const builtinPlugins = await getBuiltinPluginsFromConfig( @@ -88,7 +91,7 @@ export const addCommand: AddCommand = (ctx, cli) => ...opts, }, { - logger: ctx.log, + log: ctx.log, cache, pubsub, cwd, diff --git a/packages/gateway/src/commands/supergraph.ts b/packages/gateway/src/commands/supergraph.ts index 6c17f1364..be111668b 100644 --- a/packages/gateway/src/commands/supergraph.ts +++ b/packages/gateway/src/commands/supergraph.ts @@ -4,6 +4,7 @@ import { isAbsolute, resolve } from 'node:path'; import { Option } from '@commander-js/extra-typings'; import { createGatewayRuntime, + createLoggerFromLogging, type GatewayConfigSupergraph, type GatewayGraphOSManagedFederationOptions, type GatewayHiveCDNOptions, @@ -28,7 +29,6 @@ import { } from '../config'; import { startServerForRuntime } from '../servers/startServerForRuntime'; import { handleFork } from './handleFork'; -import { handleLoggingConfig } from './handleLoggingOption'; import { handleReportingConfig } from './handleReportingConfig'; export const addCommand: AddCommand = (ctx, cli) => @@ -65,6 +65,10 @@ export const addCommand: AddCommand = (ctx, cli) => // TODO: move to optsWithGlobals once https://github.com/commander-js/extra-typings/pull/76 is merged const { apolloUplink } = 
this.opts(); + ctx.log.info( + `Starting ${ctx.productName} ${ctx.version} with supergraph`, + ); + const loadedConfig = await loadConfig({ log: ctx.log, configPath: opts.configPath, @@ -75,15 +79,14 @@ export const addCommand: AddCommand = (ctx, cli) => let supergraph: | UnifiedGraphConfig | GatewayHiveCDNOptions - | GatewayGraphOSManagedFederationOptions = 'supergraph.graphql'; + | GatewayGraphOSManagedFederationOptions = './supergraph.graphql'; if (schemaPathOrUrl) { ctx.log.info(`Supergraph will be loaded from ${schemaPathOrUrl}`); if (hiveCdnKey) { - ctx.log.info(`Using Hive CDN key`); + ctx.log.info('Using Hive CDN key'); if (!isUrl(schemaPathOrUrl)) { ctx.log.error( - 'Hive CDN endpoint must be a URL when providing --hive-cdn-key but got ' + - schemaPathOrUrl, + `Hive CDN endpoint must be a URL when providing --hive-cdn-key but got ${schemaPathOrUrl}`, ); process.exit(1); } @@ -93,7 +96,7 @@ export const addCommand: AddCommand = (ctx, cli) => key: hiveCdnKey, }; } else if (apolloKey) { - ctx.log.info(`Using GraphOS API key`); + ctx.log.info('Using GraphOS API key'); if (!schemaPathOrUrl.includes('@')) { ctx.log.error( `Apollo GraphOS requires a graph ref in the format @ when providing --apollo-key. Please provide a valid graph ref not ${schemaPathOrUrl}.`, @@ -123,7 +126,7 @@ export const addCommand: AddCommand = (ctx, cli) => ); process.exit(1); } - ctx.log.info(`Using Hive CDN endpoint: ${hiveCdnEndpoint}`); + ctx.log.info(`Using Hive CDN endpoint ${hiveCdnEndpoint}`); supergraph = { type: 'hive', endpoint: hiveCdnEndpoint, @@ -138,11 +141,11 @@ export const addCommand: AddCommand = (ctx, cli) => } if (!apolloKey) { ctx.log.error( - `Apollo GraphOS requires an API key. Please provide an API key using the --apollo-key option.`, + 'Apollo GraphOS requires an API key. 
Please provide an API key using the --apollo-key option.', ); process.exit(1); } - ctx.log.info(`Using Apollo Graph Ref: ${apolloGraphRef}`); + ctx.log.info(`Using Apollo Graph Ref ${apolloGraphRef}`); supergraph = { type: 'graphos', apiKey: apolloKey, @@ -153,7 +156,7 @@ export const addCommand: AddCommand = (ctx, cli) => supergraph = loadedConfig.supergraph!; // TODO: assertion wont be necessary when exactOptionalPropertyTypes // TODO: how to provide hive-cdn-key? } else { - ctx.log.info(`Using default supergraph location: ${supergraph}`); + ctx.log.info(`Using default supergraph location "${supergraph}"`); } const registryConfig: Pick = {}; @@ -171,11 +174,11 @@ export const addCommand: AddCommand = (ctx, cli) => const pubsub = loadedConfig.pubsub || new PubSub(); const cwd = loadedConfig.cwd || process.cwd(); if (loadedConfig.logging != null) { - handleLoggingConfig(loadedConfig.logging, ctx); + ctx.log = createLoggerFromLogging(loadedConfig.logging); } const cache = await getCacheInstanceFromConfig(loadedConfig, { pubsub, - logger: ctx.log, + log: ctx.log, cwd, }); const builtinPlugins = await getBuiltinPluginsFromConfig( @@ -184,7 +187,7 @@ export const addCommand: AddCommand = (ctx, cli) => ...opts, }, { - logger: ctx.log, + log: ctx.log, cache, pubsub, cwd, @@ -224,7 +227,7 @@ export const addCommand: AddCommand = (ctx, cli) => loadedConfig.persistedDocuments.token); if (!token) { ctx.log.error( - `Hive persisted documents needs a CDN token. Please provide it through the "--hive-persisted-documents-token " option or the config.`, + 'Hive persisted documents needs a CDN token. Please provide it through the "--hive-persisted-documents-token " option or the config.', ); process.exit(1); } @@ -270,12 +273,13 @@ export async function runSupergraph( absSchemaPath = isAbsolute(supergraphPath) ? 
String(supergraphPath) : resolve(process.cwd(), supergraphPath); - log.info(`Reading supergraph from ${absSchemaPath}`); + log.info({ path: absSchemaPath }, 'Reading supergraph'); try { await lstat(absSchemaPath); - } catch { + } catch (err) { log.error( - `Could not read supergraph from ${absSchemaPath}. Make sure the file exists.`, + { path: absSchemaPath, err }, + 'Could not read supergraph. Make sure the file exists.', ); process.exit(1); } @@ -285,11 +289,14 @@ export async function runSupergraph( // Polling should not be enabled when watching the file delete config.pollingInterval; if (cluster.isPrimary) { - log.info(`Watching ${absSchemaPath} for changes`); + log.info({ path: absSchemaPath }, 'Watching supergraph for changes'); const ctrl = new AbortController(); registerTerminateHandler((signal) => { - log.info(`Closing watcher for ${absSchemaPath} on ${signal}`); + log.info( + { path: absSchemaPath }, + `Closing watcher for supergraph on ${signal}`, + ); return ctrl.abort(`Process terminated on ${signal}`); }); @@ -301,7 +308,10 @@ export async function runSupergraph( // TODO: or should we just ignore? throw new Error(`Supergraph file was renamed to "${f.filename}"`); } - log.info(`${absSchemaPath} changed. Invalidating supergraph...`); + log.info( + { path: absSchemaPath }, + 'Supergraph changed. 
Invalidating...', + ); if (config.fork && config.fork > 1) { for (const workerId in cluster.workers) { cluster.workers[workerId]!.send('invalidateUnifiedGraph'); @@ -314,10 +324,16 @@ export async function runSupergraph( })() .catch((e) => { if (e.name === 'AbortError') return; - log.error(`Watcher for ${absSchemaPath} closed with an error`, e); + log.error( + { path: absSchemaPath, err: e }, + 'Supergraph watcher closed with an error', + ); }) .then(() => { - log.info(`Watcher for ${absSchemaPath} successfuly closed`); + log.info( + { path: absSchemaPath }, + 'Supergraph watcher successfuly closed', + ); }); } } @@ -352,16 +368,17 @@ export async function runSupergraph( const runtime = createGatewayRuntime(config); if (absSchemaPath) { - log.info(`Serving local supergraph from ${absSchemaPath}`); + log.info({ path: absSchemaPath }, 'Serving local supergraph'); } else if (isUrl(String(config.supergraph))) { - log.info(`Serving remote supergraph from ${config.supergraph}`); + log.info({ url: config.supergraph }, 'Serving remote supergraph'); } else if ( typeof config.supergraph === 'object' && 'type' in config.supergraph && config.supergraph.type === 'hive' ) { log.info( - `Serving supergraph from Hive CDN at ${config.supergraph.endpoint}`, + { endpoint: config.supergraph.endpoint }, + 'Serving supergraph from Hive CDN', ); } else { log.info('Serving supergraph from config'); diff --git a/packages/gateway/src/config.ts b/packages/gateway/src/config.ts index 9de1b580c..fcbda7647 100644 --- a/packages/gateway/src/config.ts +++ b/packages/gateway/src/config.ts @@ -5,8 +5,9 @@ import type { GatewayConfig, GatewayPlugin, } from '@graphql-hive/gateway-runtime'; +import { LegacyLogger, type Logger } from '@graphql-hive/logger'; import { HivePubSub } from '@graphql-hive/pubsub'; -import type { KeyValueCache, Logger } from '@graphql-mesh/types'; +import type { KeyValueCache } from '@graphql-mesh/types'; import type { GatewayCLIBuiltinPluginConfig } from './cli'; import type 
{ ServerConfig } from './servers/types'; @@ -105,7 +106,7 @@ export async function getBuiltinPluginsFromConfig( config: GatewayCLIBuiltinPluginConfig, ctx: { cache: KeyValueCache; - logger: Logger; + log: Logger; pubsub: HivePubSub; cwd: string; }, @@ -125,12 +126,7 @@ export async function getBuiltinPluginsFromConfig( const { useOpenTelemetry } = await import( '@graphql-mesh/plugin-opentelemetry' ); - plugins.push( - useOpenTelemetry({ - logger: ctx.logger, - ...config.openTelemetry, - }), - ); + plugins.push(useOpenTelemetry({ ...config.openTelemetry, log: ctx.log })); } if (config.rateLimiting) { @@ -204,7 +200,7 @@ export async function getBuiltinPluginsFromConfig( */ export async function getCacheInstanceFromConfig( config: GatewayCLIBuiltinPluginConfig, - ctx: { logger: Logger; pubsub: HivePubSub; cwd: string }, + ctx: { log: Logger; pubsub: HivePubSub; cwd: string }, ): Promise { if (typeof config.cache === 'function') { return config.cache(ctx); @@ -219,6 +215,8 @@ export async function getCacheInstanceFromConfig( return new RedisCache({ ...ctx, ...config.cache, + // TODO: use new logger + logger: LegacyLogger.from(ctx.log), }) as KeyValueCache; } case 'cfw-kv': { @@ -241,7 +239,7 @@ export async function getCacheInstanceFromConfig( } } if (config.cache.type !== 'localforage') { - ctx.logger.warn( + ctx.log.warn( 'Unknown cache type, falling back to localforage', config.cache, ); diff --git a/packages/gateway/src/index.ts b/packages/gateway/src/index.ts index 5a01d9d6d..f294a5326 100644 --- a/packages/gateway/src/index.ts +++ b/packages/gateway/src/index.ts @@ -1,6 +1,6 @@ export * from './cli'; +export * from '@graphql-hive/logger'; export * from '@graphql-hive/gateway-runtime'; -export { LogLevel, DefaultLogger } from '@graphql-mesh/utils'; export { PubSub } from '@graphql-hive/pubsub'; export * from '@graphql-mesh/plugin-jwt-auth'; export * from '@graphql-mesh/plugin-opentelemetry'; diff --git a/packages/gateway/src/servers/nodeHttp.ts 
b/packages/gateway/src/servers/nodeHttp.ts index ad6b7e48c..978c916cb 100644 --- a/packages/gateway/src/servers/nodeHttp.ts +++ b/packages/gateway/src/servers/nodeHttp.ts @@ -98,12 +98,12 @@ export async function startNodeHttpServer>( gwRuntime.disposableStack.defer( () => new Promise((resolve, reject) => { - log.info(`Stopping the WebSocket server`); + log.info('Stopping the WebSocket server'); wsServer.close((err) => { if (err) { return reject(err); } - log.info(`Stopped the WebSocket server successfully`); + log.info('Stopped the WebSocket server successfully'); return resolve(); }); }), @@ -117,10 +117,10 @@ export async function startNodeHttpServer>( () => new Promise((resolve) => { process.stderr.write('\n'); - log.info(`Stopping the server`); + log.info('Stopping the server'); server.closeAllConnections(); server.close(() => { - log.info(`Stopped the server successfully`); + log.info('Stopped the server successfully'); return resolve(); }); }), diff --git a/packages/gateway/src/servers/startServerForRuntime.ts b/packages/gateway/src/servers/startServerForRuntime.ts index 6cd4e14a8..0826a0851 100644 --- a/packages/gateway/src/servers/startServerForRuntime.ts +++ b/packages/gateway/src/servers/startServerForRuntime.ts @@ -20,7 +20,7 @@ export function startServerForRuntime< ): MaybePromise { process.on('message', (message) => { if (message === 'invalidateUnifiedGraph') { - log.info(`Invalidating Supergraph`); + log.info('Invalidating Supergraph'); runtime.invalidateUnifiedGraph(); } }); diff --git a/packages/gateway/src/servers/types.ts b/packages/gateway/src/servers/types.ts index e29222e46..950901a53 100644 --- a/packages/gateway/src/servers/types.ts +++ b/packages/gateway/src/servers/types.ts @@ -1,4 +1,4 @@ -import { Logger } from '@graphql-mesh/types'; +import type { Logger } from '@graphql-hive/logger'; export interface ServerConfig { /** diff --git a/packages/logger-json/CHANGELOG.md b/packages/logger-json/CHANGELOG.md deleted file mode 100644 index 
12d75dc21..000000000 --- a/packages/logger-json/CHANGELOG.md +++ /dev/null @@ -1,40 +0,0 @@ -# @graphql-hive/logger-json - -## 0.0.4 - -### Patch Changes - -- [#946](https://github.com/graphql-hive/gateway/pull/946) [`7d771d8`](https://github.com/graphql-hive/gateway/commit/7d771d89ff6d731b1025acfc5eb197541a6d5d35) Thanks [@ardatan](https://github.com/ardatan)! - dependencies updates: - - - Updated dependency [`@graphql-mesh/utils@^0.104.2` ↗︎](https://www.npmjs.com/package/@graphql-mesh/utils/v/0.104.2) (from `^0.104.1`, in `dependencies`) - -## 0.0.3 - -### Patch Changes - -- [#706](https://github.com/graphql-hive/gateway/pull/706) [`e393337`](https://github.com/graphql-hive/gateway/commit/e393337ecb40beffb79748b19b5aa8f2fd9197b7) Thanks [@EmrysMyrddin](https://github.com/EmrysMyrddin)! - dependencies updates: - - - Updated dependency [`@graphql-mesh/utils@^0.104.1` ↗︎](https://www.npmjs.com/package/@graphql-mesh/utils/v/0.104.1) (from `^0.104.0`, in `dependencies`) - -- [#775](https://github.com/graphql-hive/gateway/pull/775) [`33f7dfd`](https://github.com/graphql-hive/gateway/commit/33f7dfdb10eef2a1e7f6dffe0ce6e4bb3cc7c2c6) Thanks [@renovate](https://github.com/apps/renovate)! - dependencies updates: - - - Updated dependency [`@graphql-mesh/types@^0.104.0` ↗︎](https://www.npmjs.com/package/@graphql-mesh/types/v/0.104.0) (from `^0.103.18`, in `dependencies`) - - Updated dependency [`@graphql-mesh/utils@^0.104.0` ↗︎](https://www.npmjs.com/package/@graphql-mesh/utils/v/0.104.0) (from `^0.103.18`, in `dependencies`) - -## 0.0.2 - -### Patch Changes - -- [#697](https://github.com/graphql-hive/gateway/pull/697) [`6cc87c6`](https://github.com/graphql-hive/gateway/commit/6cc87c6e9aa0cbb9eff517eeec92d57b9c96d39e) Thanks [@renovate](https://github.com/apps/renovate)! 
- dependencies updates: - - - Updated dependency [`@graphql-mesh/types@^0.103.18` ↗︎](https://www.npmjs.com/package/@graphql-mesh/types/v/0.103.18) (from `^0.103.16`, in `dependencies`) - - Updated dependency [`@graphql-mesh/utils@^0.103.18` ↗︎](https://www.npmjs.com/package/@graphql-mesh/utils/v/0.103.18) (from `^0.103.16`, in `dependencies`) - -## 0.0.1 - -### Patch Changes - -- [#642](https://github.com/graphql-hive/gateway/pull/642) [`30e41a6`](https://github.com/graphql-hive/gateway/commit/30e41a6f5b97c42ae548564bce3f6e4a92b1225f) Thanks [@ardatan](https://github.com/ardatan)! - New JSON-based logger - - By default, it prints pretty still to the console unless NODE_ENV is production. - For JSON output, set the `LOG_FORMAT` environment variable to `json`. diff --git a/packages/logger-json/package.json b/packages/logger-json/package.json deleted file mode 100644 index 47cc396d6..000000000 --- a/packages/logger-json/package.json +++ /dev/null @@ -1,56 +0,0 @@ -{ - "name": "@graphql-hive/logger-json", - "version": "0.0.4", - "type": "module", - "repository": { - "type": "git", - "url": "git+https://github.com/graphql-hive/gateway.git", - "directory": "packages/logger-json" - }, - "author": { - "email": "contact@the-guild.dev", - "name": "The Guild", - "url": "https://the-guild.dev" - }, - "license": "MIT", - "engines": { - "node": ">=18.0.0" - }, - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "require": { - "types": "./dist/index.d.cts", - "default": "./dist/index.cjs" - }, - "import": { - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "build": "pkgroll --clean-dist", - "prepack": "yarn build" - }, - "peerDependencies": { - "graphql": "^15.9.0 || ^16.9.0" - }, - "dependencies": { - "@graphql-mesh/cross-helpers": "^0.4.10", - "@graphql-mesh/types": "^0.104.0", - "@graphql-mesh/utils": "^0.104.2", - "cross-inspect": 
"^1.0.1", - "tslib": "^2.8.1" - }, - "devDependencies": { - "graphql": "^16.9.0", - "pkgroll": "2.12.2" - }, - "sideEffects": false -} diff --git a/packages/logger-json/src/index.ts b/packages/logger-json/src/index.ts deleted file mode 100644 index 712d7d7e1..000000000 --- a/packages/logger-json/src/index.ts +++ /dev/null @@ -1,185 +0,0 @@ -import { process } from '@graphql-mesh/cross-helpers'; -import type { LazyLoggerMessage, Logger } from '@graphql-mesh/types'; -import { LogLevel } from '@graphql-mesh/utils'; -import { inspect } from 'cross-inspect'; - -export interface JSONLoggerOptions { - name?: string; - meta?: Record; - level?: LogLevel; - console?: Console; -} -function truthy(val: unknown) { - return ( - val === true || - val === 1 || - ['1', 't', 'true', 'y', 'yes'].includes(String(val)) - ); -} - -declare global { - var DEBUG: string; -} - -export class JSONLogger implements Logger { - name?: string; - meta: Record; - logLevel: LogLevel; - console: Console; - constructor(opts?: JSONLoggerOptions) { - this.name = opts?.name; - this.console = opts?.console || console; - this.meta = opts?.meta || {}; - const debugStrs = [process.env['DEBUG'], globalThis.DEBUG]; - if (opts?.level != null) { - this.logLevel = opts.level; - } else { - this.logLevel = LogLevel.info; - for (const debugStr of debugStrs) { - if (debugStr) { - if (truthy(debugStr)) { - this.logLevel = LogLevel.debug; - break; - } - if (opts?.name) { - if (debugStr?.toString()?.includes(opts.name)) { - this.logLevel = LogLevel.debug; - break; - } - } - } - } - } - } - - log(...messageArgs: LazyLoggerMessage[]) { - if (this.logLevel > LogLevel.info) { - return; - } - const finalMessage = this.prepareFinalMessage('info', messageArgs); - this.console.log(finalMessage); - } - - warn(...messageArgs: LazyLoggerMessage[]) { - if (this.logLevel > LogLevel.warn) { - return; - } - const finalMessage = this.prepareFinalMessage('warn', messageArgs); - this.console.warn(finalMessage); - } - - 
info(...messageArgs: LazyLoggerMessage[]) { - if (this.logLevel > LogLevel.info) { - return; - } - const finalMessage = this.prepareFinalMessage('info', messageArgs); - this.console.info(finalMessage); - } - - error(...messageArgs: LazyLoggerMessage[]) { - if (this.logLevel > LogLevel.error) { - return; - } - const finalMessage = this.prepareFinalMessage('error', messageArgs); - this.console.error(finalMessage); - } - - debug(...messageArgs: LazyLoggerMessage[]) { - if (this.logLevel > LogLevel.debug) { - return; - } - const finalMessage = this.prepareFinalMessage('debug', messageArgs); - this.console.debug(finalMessage); - } - - child(nameOrMeta: string | Record) { - let newName: string | undefined; - let newMeta: Record; - if (typeof nameOrMeta === 'string') { - newName = this.name ? `${this.name}, ${nameOrMeta}` : nameOrMeta; - newMeta = this.meta; - } else if (typeof nameOrMeta === 'object') { - newName = this.name; - newMeta = { ...this.meta, ...nameOrMeta }; - } else { - throw new Error('Invalid argument type'); - } - return new JSONLogger({ - name: newName, - meta: newMeta, - level: this.logLevel, - console: this.console, - }); - } - - addPrefix(prefix: string | Record) { - if (typeof prefix === 'string') { - this.name = this.name ? 
`${this.name}, ${prefix}` : prefix; - } else if (typeof prefix === 'object') { - this.meta = { ...this.meta, ...prefix }; - } - return this; - } - - private prepareFinalMessage(level: string, messageArgs: LazyLoggerMessage[]) { - const flattenedMessageArgs = messageArgs - .flat(Infinity) - .flatMap((messageArg) => { - if (typeof messageArg === 'function') { - messageArg = messageArg(); - } - if (messageArg?.toJSON) { - messageArg = messageArg.toJSON(); - } - if (messageArg instanceof AggregateError) { - return messageArg.errors; - } - return messageArg; - }); - const finalMessage: Record = { - ...this.meta, - level, - time: new Date().toISOString(), - }; - if (this.name) { - finalMessage['name'] = this.name; - } - const extras: any[] = []; - for (let messageArg of flattenedMessageArgs) { - if (messageArg == null) { - continue; - } - const typeofMessageArg = typeof messageArg; - if ( - typeofMessageArg === 'string' || - typeofMessageArg === 'number' || - typeofMessageArg === 'boolean' - ) { - finalMessage['msg'] = finalMessage['msg'] - ? finalMessage['msg'] + ', ' + messageArg - : messageArg; - } else if (typeofMessageArg === 'object') { - if (messageArg instanceof Error) { - finalMessage['msg'] = finalMessage['msg'] - ? 
finalMessage['msg'] + ', ' + messageArg.message - : messageArg.message; - finalMessage['stack'] = messageArg.stack; - } else if ( - Object.prototype.toString.call(messageArg).startsWith('[object') - ) { - Object.assign(finalMessage, messageArg); - } else { - extras.push(messageArg); - } - } - } - if (extras.length) { - if (extras.length === 1) { - finalMessage['extras'] = inspect(extras[0]); - } else { - finalMessage['extras'] = extras.map((extra) => inspect(extra)); - } - } - return JSON.stringify(finalMessage); - } -} diff --git a/packages/logger-pino/CHANGELOG.md b/packages/logger-pino/CHANGELOG.md deleted file mode 100644 index 1086fdd70..000000000 --- a/packages/logger-pino/CHANGELOG.md +++ /dev/null @@ -1,25 +0,0 @@ -# @graphql-hive/logger-pino - -## 1.0.1 - -### Patch Changes - -- [#1156](https://github.com/graphql-hive/gateway/pull/1156) [`fb74009`](https://github.com/graphql-hive/gateway/commit/fb740098652dba2e9107981d1f4e362143478451) Thanks [@dependabot](https://github.com/apps/dependabot)! - dependencies updates: - - - Updated dependency [`pino@^9.7.0` ↗︎](https://www.npmjs.com/package/pino/v/9.7.0) (from `^9.6.0`, in `peerDependencies`) - -## 1.0.0 - -### Major Changes - -- [#946](https://github.com/graphql-hive/gateway/pull/946) [`7d771d8`](https://github.com/graphql-hive/gateway/commit/7d771d89ff6d731b1025acfc5eb197541a6d5d35) Thanks [@ardatan](https://github.com/ardatan)! 
- New Pino integration (also helpful for Fastify integration); - - ```ts - import { defineConfig } from '@graphql-hive/gateway'; - import { createLoggerFromPino } from '@graphql-hive/logger-pino'; - import pino from 'pino'; - - export const gatewayConfig = defineConfig({ - logging: createLoggerFromPino(pino({ level: 'info' })), - }); - ``` diff --git a/packages/logger-pino/package.json b/packages/logger-pino/package.json deleted file mode 100644 index d42104bca..000000000 --- a/packages/logger-pino/package.json +++ /dev/null @@ -1,58 +0,0 @@ -{ - "name": "@graphql-hive/logger-pino", - "version": "1.0.1", - "type": "module", - "repository": { - "type": "git", - "url": "git+https://github.com/graphql-hive/gateway.git", - "directory": "packages/logger-pino" - }, - "homepage": "https://the-guild.dev/graphql/hive/docs/gateway", - "author": { - "email": "contact@the-guild.dev", - "name": "The Guild", - "url": "https://the-guild.dev" - }, - "license": "MIT", - "engines": { - "node": ">=18.0.0" - }, - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "require": { - "types": "./dist/index.d.cts", - "default": "./dist/index.cjs" - }, - "import": { - "types": "./dist/index.d.ts", - "default": "./dist/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "build": "pkgroll --clean-dist", - "prepack": "yarn build" - }, - "peerDependencies": { - "graphql": "^15.9.0 || ^16.9.0", - "pino": "^9.7.0" - }, - "dependencies": { - "@graphql-mesh/types": "^0.104.0", - "@graphql-mesh/utils": "^0.104.2", - "@whatwg-node/disposablestack": "^0.0.6", - "tslib": "^2.8.1" - }, - "devDependencies": { - "graphql": "16.11.0", - "pino": "^9.7.0", - "pkgroll": "2.12.2" - }, - "sideEffects": false -} diff --git a/packages/logger-pino/src/index.ts b/packages/logger-pino/src/index.ts deleted file mode 100644 index bd48c9a3a..000000000 --- a/packages/logger-pino/src/index.ts +++ /dev/null @@ -1,125 +0,0 @@ -import type 
{ - LazyLoggerMessage, - Logger as MeshLogger, -} from '@graphql-mesh/types'; -import { LogLevel } from '@graphql-mesh/utils'; -import type pino from 'pino'; - -type PinoWithChild = pino.BaseLogger & { - child: (meta: any) => PinoWithChild; -}; - -function prepareArgs(messageArgs: LazyLoggerMessage[]): Parameters { - const flattenedMessageArgs = messageArgs - .flat(Infinity) - .flatMap((messageArg) => { - if (typeof messageArg === 'function') { - messageArg = messageArg(); - } - if (messageArg?.toJSON) { - messageArg = messageArg.toJSON(); - } - if (messageArg instanceof AggregateError) { - return messageArg.errors; - } - return messageArg; - }); - let message: string = ''; - const extras: any[] = []; - for (let messageArg of flattenedMessageArgs) { - if (messageArg == null) { - continue; - } - const typeofMessageArg = typeof messageArg; - if ( - typeofMessageArg === 'string' || - typeofMessageArg === 'number' || - typeofMessageArg === 'boolean' - ) { - message = message ? message + ', ' + messageArg : messageArg; - } else if (typeofMessageArg === 'object') { - extras.push(messageArg); - } - } - if (extras.length > 0) { - return [Object.assign({}, ...extras), message]; - } - return [message]; -} - -class PinoLoggerAdapter implements MeshLogger { - public name?: string; - constructor( - private pinoLogger: PinoWithChild, - private meta: Record = {}, - ) { - if (meta['name']) { - this.name = meta['name']; - } - } - - get level(): LogLevel { - if (this.pinoLogger.level) { - return LogLevel[this.pinoLogger.level as keyof typeof LogLevel]; - } - return LogLevel.silent; - } - - set level(level: LogLevel) { - this.pinoLogger.level = LogLevel[level]; - } - - isLevelEnabled(level: LogLevel) { - if (this.level > level) { - return false; - } - return true; - } - - log(...args: any[]) { - if (this.isLevelEnabled(LogLevel.info)) { - this.pinoLogger.info(...prepareArgs(args)); - } - } - info(...args: any[]) { - if (this.isLevelEnabled(LogLevel.info)) { - 
this.pinoLogger.info(...prepareArgs(args)); - } - } - warn(...args: any[]) { - if (this.isLevelEnabled(LogLevel.warn)) { - this.pinoLogger.warn(...prepareArgs(args)); - } - } - error(...args: any[]) { - if (this.isLevelEnabled(LogLevel.error)) { - this.pinoLogger.error(...prepareArgs(args)); - } - } - debug(...lazyArgs: LazyLoggerMessage[]) { - if (this.isLevelEnabled(LogLevel.debug)) { - this.pinoLogger.debug(...prepareArgs(lazyArgs)); - } - } - child(nameOrMeta: string | Record) { - if (typeof nameOrMeta === 'string') { - nameOrMeta = { - name: this.name - ? this.name.includes(nameOrMeta) - ? this.name - : `${this.name}, ${nameOrMeta}` - : nameOrMeta, - }; - } - return new PinoLoggerAdapter(this.pinoLogger.child(nameOrMeta), { - ...this.meta, - ...nameOrMeta, - }); - } -} - -export function createLoggerFromPino( - pinoLogger: PinoWithChild, -): PinoLoggerAdapter { - return new PinoLoggerAdapter(pinoLogger); -} diff --git a/packages/logger-pino/tests/pino.spec.ts b/packages/logger-pino/tests/pino.spec.ts deleted file mode 100644 index bde7486a7..000000000 --- a/packages/logger-pino/tests/pino.spec.ts +++ /dev/null @@ -1,148 +0,0 @@ -import { hostname } from 'node:os'; -import { Writable } from 'node:stream'; -import pino from 'pino'; -import { describe, expect, it } from 'vitest'; -import { createLoggerFromPino } from '../src'; - -describe('Pino', () => { - let log = ''; - let lastCallback = () => {}; - const stream = new Writable({ - write(chunk, _encoding, callback) { - log = chunk.toString('utf-8'); - lastCallback = callback; - }, - }); - const logLevels = ['error', 'warn', 'info', 'debug'] as const; - for (const level of logLevels) { - describe(`Level: ${level}`, () => { - it('basic', async () => { - const logger = pino( - { - level, - }, - stream, - ); - const loggerAdapter = createLoggerFromPino(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - 
loggerAdapter[level](...testData); - lastCallback(); - const logJson = JSON.parse(log); - expect(logJson).toEqual({ - level: pino.levels.values[level], - foo: 'bar', - msg: 'Hello, World, 42, true, Expensive', - pid: process.pid, - time: expect.any(Number), - hostname: hostname(), - }); - }); - it('child', async () => { - const logger = pino( - { - level, - }, - stream, - ); - const loggerAdapter = createLoggerFromPino(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child'); - childLogger[level](...testData); - lastCallback(); - const logJson = JSON.parse(log); - expect(logJson).toEqual({ - level: pino.levels.values[level], - foo: 'bar', - msg: 'Hello, World, 42, true, Expensive', - name: 'child', - pid: process.pid, - time: expect.any(Number), - hostname: hostname(), - }); - }); - it('deduplicate names', async () => { - const logger = pino( - { - level, - }, - stream, - ); - const loggerAdapter = createLoggerFromPino(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child').child('child'); - childLogger[level](...testData); - lastCallback(); - const logJson = JSON.parse(log); - expect(logJson).toEqual({ - level: pino.levels.values[level], - foo: 'bar', - msg: 'Hello, World, 42, true, Expensive', - name: 'child', - pid: process.pid, - time: expect.any(Number), - hostname: hostname(), - }); - }); - it('nested', async () => { - const logger = pino( - { - level, - }, - stream, - ); - const loggerAdapter = createLoggerFromPino(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child'); - const nestedLogger = childLogger.child('nested'); - nestedLogger[level](...testData); - lastCallback(); - const 
logJson = JSON.parse(log); - expect(logJson).toEqual({ - level: pino.levels.values[level], - foo: 'bar', - msg: 'Hello, World, 42, true, Expensive', - name: 'child, nested', - pid: process.pid, - time: expect.any(Number), - hostname: hostname(), - }); - }); - }); - } -}); diff --git a/packages/logger-winston/CHANGELOG.md b/packages/logger-winston/CHANGELOG.md deleted file mode 100644 index a9474a769..000000000 --- a/packages/logger-winston/CHANGELOG.md +++ /dev/null @@ -1,47 +0,0 @@ -# @graphql-hive/logger-winston - -## 1.0.2 - -### Patch Changes - -- [#727](https://github.com/graphql-hive/gateway/pull/727) [`c54a080`](https://github.com/graphql-hive/gateway/commit/c54a080b8b9c477ed55dd7c23fc8fcae9139bec8) Thanks [@renovate](https://github.com/apps/renovate)! - dependencies updates: - - - Updated dependency [`@whatwg-node/disposablestack@^0.0.6` ↗︎](https://www.npmjs.com/package/@whatwg-node/disposablestack/v/0.0.6) (from `^0.0.5`, in `dependencies`) - -- [#775](https://github.com/graphql-hive/gateway/pull/775) [`33f7dfd`](https://github.com/graphql-hive/gateway/commit/33f7dfdb10eef2a1e7f6dffe0ce6e4bb3cc7c2c6) Thanks [@renovate](https://github.com/apps/renovate)! - dependencies updates: - - - Updated dependency [`@graphql-mesh/types@^0.104.0` ↗︎](https://www.npmjs.com/package/@graphql-mesh/types/v/0.104.0) (from `^0.103.18`, in `dependencies`) - -## 1.0.1 - -### Patch Changes - -- [#696](https://github.com/graphql-hive/gateway/pull/696) [`a289faa`](https://github.com/graphql-hive/gateway/commit/a289faae1469eb46f1458be341d21909fe5f8f8f) Thanks [@ardatan](https://github.com/ardatan)! 
- dependencies updates: - - - Updated dependency [`@graphql-mesh/types@^0.103.18` ↗︎](https://www.npmjs.com/package/@graphql-mesh/types/v/0.103.18) (from `^0.103.6`, in `dependencies`) - -## 1.0.0 - -### Major Changes - -- [#622](https://github.com/graphql-hive/gateway/pull/622) [`16f9bd9`](https://github.com/graphql-hive/gateway/commit/16f9bd981d5779c585c00bf79e790c94b00326f1) Thanks [@ardatan](https://github.com/ardatan)! - **Winston Adapter** - - Now you can integrate [Winston](https://github.com/winstonjs/winston) into Hive Gateway on Node.js - - ```ts - import { defineConfig } from '@graphql-hive/gateway'; - import { createLoggerFromWinston } from '@graphql-hive/winston'; - import { createLogger, format, transports } from 'winston'; - - // Create a Winston logger - const winstonLogger = createLogger({ - level: 'info', - format: format.combine(format.timestamp(), format.json()), - transports: [new transports.Console()], - }); - - export const gatewayConfig = defineConfig({ - // Create an adapter for Winston - logging: createLoggerFromWinston(winstonLogger), - }); - ``` diff --git a/packages/logger-winston/package.json b/packages/logger-winston/package.json deleted file mode 100644 index 6238b15b5..000000000 --- a/packages/logger-winston/package.json +++ /dev/null @@ -1,57 +0,0 @@ -{ - "name": "@graphql-hive/logger-winston", - "version": "1.0.2", - "type": "module", - "repository": { - "type": "git", - "url": "git+https://github.com/graphql-hive/gateway.git", - "directory": "packages/logger-winston" - }, - "homepage": "https://the-guild.dev/graphql/hive/docs/gateway", - "author": { - "email": "contact@the-guild.dev", - "name": "The Guild", - "url": "https://the-guild.dev" - }, - "license": "MIT", - "engines": { - "node": ">=18.0.0" - }, - "main": "./dist/index.js", - "types": "./dist/index.d.ts", - "exports": { - ".": { - "require": { - "types": "./dist/index.d.cts", - "default": "./dist/index.cjs" - }, - "import": { - "types": "./dist/index.d.ts", - "default": 
"./dist/index.js" - } - }, - "./package.json": "./package.json" - }, - "files": [ - "dist" - ], - "scripts": { - "build": "pkgroll --clean-dist", - "prepack": "yarn build" - }, - "peerDependencies": { - "graphql": "^15.9.0 || ^16.9.0", - "winston": "^3.17.0" - }, - "dependencies": { - "@graphql-mesh/types": "^0.104.0", - "@whatwg-node/disposablestack": "^0.0.6", - "tslib": "^2.8.1" - }, - "devDependencies": { - "graphql": "16.11.0", - "pkgroll": "2.12.2", - "winston": "^3.17.0" - }, - "sideEffects": false -} diff --git a/packages/logger-winston/src/index.ts b/packages/logger-winston/src/index.ts deleted file mode 100644 index d050b60f0..000000000 --- a/packages/logger-winston/src/index.ts +++ /dev/null @@ -1,102 +0,0 @@ -import type { - LazyLoggerMessage, - Logger as MeshLogger, -} from '@graphql-mesh/types'; -import { DisposableSymbols } from '@whatwg-node/disposablestack'; -import type { Logger as WinstonLogger } from 'winston'; - -function prepareArgs(messageArgs: LazyLoggerMessage[]) { - const flattenedMessageArgs = messageArgs - .flat(Infinity) - .flatMap((messageArg) => { - if (typeof messageArg === 'function') { - messageArg = messageArg(); - } - if (messageArg?.toJSON) { - messageArg = messageArg.toJSON(); - } - if (messageArg instanceof AggregateError) { - return messageArg.errors; - } - return messageArg; - }); - let message: string = ''; - const extras: any[] = []; - for (let messageArg of flattenedMessageArgs) { - if (messageArg == null) { - continue; - } - const typeofMessageArg = typeof messageArg; - if ( - typeofMessageArg === 'string' || - typeofMessageArg === 'number' || - typeofMessageArg === 'boolean' - ) { - message = message ? 
message + ', ' + messageArg : messageArg; - } else if (typeofMessageArg === 'object') { - extras.push(messageArg); - } - } - return [message, ...extras] as const; -} - -class WinstonLoggerAdapter implements MeshLogger, Disposable { - public name?: string; - constructor( - private winstonLogger: WinstonLogger, - private meta: Record = {}, - ) { - if (meta['name']) { - this.name = meta['name']; - } - } - log(...args: any[]) { - if (this.winstonLogger.isInfoEnabled()) { - this.winstonLogger.info(...prepareArgs(args)); - } - } - info(...args: any[]) { - if (this.winstonLogger.isInfoEnabled()) { - this.winstonLogger.info(...prepareArgs(args)); - } - } - warn(...args: any[]) { - if (this.winstonLogger.isWarnEnabled()) { - this.winstonLogger.warn(...prepareArgs(args)); - } - } - error(...args: any[]) { - if (this.winstonLogger.isErrorEnabled()) { - this.winstonLogger.error(...prepareArgs(args)); - } - } - debug(...lazyArgs: LazyLoggerMessage[]) { - if (this.winstonLogger.isDebugEnabled()) { - this.winstonLogger.debug(...prepareArgs(lazyArgs)); - } - } - child(nameOrMeta: string | Record) { - if (typeof nameOrMeta === 'string') { - nameOrMeta = { - name: this.name - ? this.name.includes(nameOrMeta) - ? 
this.name - : `${this.name}, ${nameOrMeta}` - : nameOrMeta, - }; - } - return new WinstonLoggerAdapter(this.winstonLogger.child(nameOrMeta), { - ...this.meta, - ...nameOrMeta, - }); - } - [DisposableSymbols.dispose]() { - return this.winstonLogger.close(); - } -} - -export function createLoggerFromWinston( - winstonLogger: WinstonLogger, -): WinstonLoggerAdapter { - return new WinstonLoggerAdapter(winstonLogger); -} diff --git a/packages/logger-winston/tests/winston.spec.ts b/packages/logger-winston/tests/winston.spec.ts deleted file mode 100644 index 9b9034dad..000000000 --- a/packages/logger-winston/tests/winston.spec.ts +++ /dev/null @@ -1,147 +0,0 @@ -import { Writable } from 'node:stream'; -import { describe, expect, it } from 'vitest'; -import * as winston from 'winston'; -import { createLoggerFromWinston } from '../src'; - -describe('Winston', () => { - let log = ''; - let lastCallback = () => {}; - const stream = new Writable({ - write(chunk, _encoding, callback) { - log = chunk.toString('utf-8'); - lastCallback = callback; - }, - }); - const logLevels = ['error', 'warn', 'info', 'debug'] as const; - for (const level of logLevels) { - describe(`Level: ${level}`, () => { - it('basic', () => { - const logger = winston.createLogger({ - level, - format: winston.format.json(), - transports: [ - new winston.transports.Stream({ - stream, - }), - ], - }); - using loggerAdapter = createLoggerFromWinston(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - loggerAdapter[level](...testData); - lastCallback(); - const logJson = JSON.parse(log, (_key, value) => value); - expect(logJson).toEqual({ - level, - foo: 'bar', - message: 'Hello, World, 42, true, Expensive', - }); - }); - it('child', () => { - const logger = winston.createLogger({ - level, - format: winston.format.json(), - transports: [ - new winston.transports.Stream({ - stream, - }), - ], - }); - const loggerAdapter = 
createLoggerFromWinston(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child'); - childLogger[level](...testData); - lastCallback(); - const logJson = JSON.parse(log, (_key, value) => value); - expect(logJson).toEqual({ - level, - foo: 'bar', - message: 'Hello, World, 42, true, Expensive', - name: 'child', - }); - }); - it('deduplicate names', () => { - const logger = winston.createLogger({ - level, - format: winston.format.json(), - transports: [ - new winston.transports.Stream({ - stream, - }), - ], - }); - const loggerAdapter = createLoggerFromWinston(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child').child('child'); - childLogger[level](...testData); - lastCallback(); - const logJson = JSON.parse(log, (_key, value) => value); - expect(logJson).toEqual({ - level, - foo: 'bar', - message: 'Hello, World, 42, true, Expensive', - name: 'child', - }); - }); - it('nested', () => { - const logger = winston.createLogger({ - level, - format: winston.format.json(), - transports: [ - new winston.transports.Stream({ - stream, - }), - ], - }); - const loggerAdapter = createLoggerFromWinston(logger); - const testData = [ - 'Hello', - ['World'], - { foo: 'bar' }, - 42, - true, - null, - undefined, - () => 'Expensive', - ]; - const childLogger = loggerAdapter.child('child'); - const nestedLogger = childLogger.child('nested'); - nestedLogger[level](...testData); - lastCallback(); - const logJson = JSON.parse(log, (_key, value) => value); - expect(logJson).toEqual({ - level, - foo: 'bar', - message: 'Hello, World, 42, true, Expensive', - name: 'child, nested', - }); - }); - }); - } -}); diff --git a/packages/logger/README.md b/packages/logger/README.md new file mode 100644 index 000000000..d94c8cb12 --- /dev/null +++ 
b/packages/logger/README.md @@ -0,0 +1,733 @@ +# Hive Logger + +Lightweight and customizable logging utility designed for use within the GraphQL Hive ecosystem. It provides structured logging capabilities, making it easier to debug and monitor applications effectively. + +## Compatibility + +The Hive Logger is designed to work seamlessly in all JavaScript environments, including Node.js, browsers, and serverless platforms. Its lightweight design ensures minimal overhead, making it suitable for a wide range of applications. + +# Getting Started + +## Install + +```sh +npm i @graphql-hive/logger +``` + +## Basic Usage + +Create a default logger that set to the `info` log level writing to the console. + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger(); + +log.debug('I wont be logged by default'); + +log.info({ some: 'attributes' }, 'Hello %s!', 'world'); + +const child = log.child({ requestId: '123-456' }); + +child.warn({ more: 'attributes' }, 'Oh hello child!'); + +const err = new Error('Woah!'); + +child.error({ err }, 'Something went wrong!'); +``` + +Will produce the following output to the console output: + + +```sh +2025-04-10T14:00:00.000Z INF Hello world! + some: "attributes" +2025-04-10T14:00:00.000Z WRN Oh hello child! + requestId: "123-456" + more: "attributes" +2025-04-10T14:00:00.000Z ERR Something went wrong! + requestId: "123-456" + err: { + stack: "Error: Woah! + at (/project/example.js:13:13) + at ModuleJob.run (node:internal/modules/esm/module_job:274:25) + at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26) + at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)" + message: "Woah!" 
+ name: "Error" + class: "Error" + } +``` + + +or if you wish to have JSON output, set the `LOG_JSON` environment variable to a truthy value: + + +```sh +$ LOG_JSON=1 node example.js + +{"some":"attributes","level":"info","msg":"Hello world!","timestamp":"2025-04-10T14:00:00.000Z"} +{"requestId":"123-456","more":"attributes","level":"warn","msg":"Oh hello child!","timestamp":"2025-04-10T14:00:00.000Z"} +{"requestId":"123-456","err":{"stack":"Error: Woah!\n    at (/project/example.js:13:13)\n    at ModuleJob.run (node:internal/modules/esm/module_job:274:25)\n    at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26)\n    at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)","message":"Woah!","name":"Error","class":"Error"},"level":"error","msg":"Something went wrong!","timestamp":"2025-04-10T14:00:00.000Z"} +``` + + +## Logging Methods and Their Arguments + +Hive Logger provides convenient methods for each log level: `trace`, `debug`, `info`, `warn`, and `error`. + +All logging methods support flexible argument patterns for structured and formatted logging: + +### No Arguments + +Logs an empty message at the specified level. + +```ts +log.debug(); +``` + +```sh +2025-04-10T14:00:00.000Z DBG +``` + +### Attributes Only + +Logs structured attributes without a message. + +```ts +log.info({ hello: 'world' }); +``` + + +```sh +2025-04-10T14:00:00.000Z INF + hello: "world" +``` + + +### Message with Interpolation + +Logs a formatted message, similar to printf-style formatting. Read more about it in the [Message Formatting section](#message-formatting). + +```ts +log.warn('Hello %s!', 'World'); +``` + + +```sh +2025-04-10T14:00:00.000Z WRN Hello World! +``` + + +### Attributes and Message (with interpolation) + +Logs structured attributes and a formatted message. The attributes can be anything object-like, including classes.
+ +```ts +const err = new Error('Something went wrong!'); +log.error(err, 'Problem occurred at %s', new Date()); +``` + + +```sh +2025-04-10T14:00:00.000Z ERR Problem occurred at Thu Apr 10 2025 14:00:00 GMT+0200 (Central European Summer Time) + stack: "Error: Something went wrong! + at (/projects/example.js:2:1)" + message: "Something went wrong!" + name: "Error" + class: "Error" +``` + + +## Message Formatting + +The Hive Logger uses the [`quick-format-unescaped` library](https://github.com/pinojs/quick-format-unescaped) to format log messages that include interpolation (e.g., placeholders like %s, %d, etc.). + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger(); + +log.info('hello %s %j %d %o', 'world', { obj: true }, 4, { another: 'obj' }); +``` + +Outputs: + +```sh +2025-04-10T14:00:00.000Z INF hello world {"obj":true} 4 {"another":"obj"} +``` + +Available interpolation placeholders are: + +- `%s` - string +- `%d` and `%f` - number with(out) decimals +- `%i` - integer number +- `%o`,`%O` and `%j` - JSON stringified object +- `%%` - escaped percentage sign + +## Logging Levels + +The default logger uses the `info` log level which will make sure to log only `info`+ logs. Available log levels are: + +- false (disables logging altogether) +- `trace` +- `debug` +- `info` _default_ +- `warn` +- `error` + +### Lazy Arguments and Performance + +Hive Logger supports "lazy" attributes for log methods. If you pass a function as the attributes argument, it will only be evaluated if the log level is enabled and the log will actually be written. This avoids unnecessary computation for expensive attributes when the log would be ignored due to the current log level. + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger({ level: 'info' }); + +log.debug( + // This function will NOT be called, since 'debug' is below the current log level. 
+ () => ({ expensive: computeExpensiveValue() }), + 'This will not be logged', +); + +log.info( + // This function WILL be called, since 'info' log level is set. + () => ({ expensive: computeExpensiveValue() }), + 'This will be logged', +); +``` + +### Change Logging Level on Creation + +When creating an instance of the logger, you can configure the logging level by configuring the `level` option. Like this: + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger({ level: 'debug' }); + +log.trace( + // you can supply "lazy" attributes which won't be evaluated unless the log level allows logging + () => ({ + wont: 'be evaluated', + some: expensiveOperation(), + }), + 'Wont be logged and attributes wont be evaluated', +); + +log.debug('Hello world!'); + +const child = log.child('[prefix] '); + +child.debug('Child loggers inherit the parent log level'); +``` + +Outputs the following to the console: + + +```sh +2025-04-10T14:00:00.000Z DBG Hello world! +2025-04-10T14:00:00.000Z DBG [prefix] Child loggers inherit the parent log level +``` + + +### Change Logging Level Dynamically + +Alternatively, you can change the logging level dynamically during runtime. There are two possible ways of doing that. + +#### Using `log.setLevel(level: LogLevel)` + +One way of doing it is by using the log's `setLevel` method. + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger({ level: 'debug' }); + +log.debug('Hello world!'); + +const child = log.child('[prefix] '); + +child.debug('Child loggers inherit the parent log level'); + +log.setLevel('trace'); + +log.trace(() => ({ hi: 'there' }), 'Now tracing is logged too!'); + +child.trace('Also on the child logger'); + +child.setLevel('info'); + +log.trace('Still logging!'); + +child.debug('Wont be logged because the child has a different log level now'); + +child.info('Hello child!'); +``` + +Outputs the following to the console: + + +```sh +2025-04-10T14:00:00.000Z DBG Hello world!
+2025-04-10T14:00:00.000Z DBG [prefix] Child loggers inherit the parent log level +2025-04-10T14:00:00.000Z TRC Now tracing is logged too! + hi: "there" +2025-04-10T14:00:00.000Z TRC [prefix] Also on the child logger +2025-04-10T14:00:00.000Z TRC Still logging! +2025-04-10T14:00:00.000Z INF Hello child! +``` + + +#### Using `LoggerOptions.level` Function + +Another way of doing it is to pass a function to the `level` option when creating a logger. + +```ts +import { Logger } from '@graphql-hive/logger'; + +let isDebug = false; + +const log = new Logger({ + level: () => { + if (isDebug) { + return 'debug'; + } + return 'info'; + }, +}); + +log.debug('isDebug is false, so this wont be logged'); + +log.info('Hello world!'); + +const child = log.child('[scoped] '); + +child.debug( + 'Child loggers inherit the parent log level function, so this wont be logged either', +); + +// enable debug mode +isDebug = true; + +child.debug('Now debug is enabled and logged'); +``` + +Outputs the following: + + +```sh +2025-04-10T14:00:00.000Z INF Hello world! +2025-04-10T14:00:00.000Z DBG [scoped] Now debug is enabled and logged +``` + + +## Child Loggers + +Child loggers in Hive Logger allow you to create new logger instances that inherit configuration (such as log level, writers, and attributes) from their parent logger. This is useful for associating contextual information (like request IDs or component names) with all logs from a specific part of your application. + +When you create a child logger using the child method, you can: + +- Add a prefix to all log messages from the child logger. +- Add attributes that will be included in every log entry from the child logger. +- Inherit the log level and writers from the parent logger, unless explicitly changed on the child. + +This makes it easy to organize and structure logs in complex applications, ensuring that related logs carry consistent context. 
+ +> [!IMPORTANT] +> In a child logger, attributes provided in individual log calls will overwrite any attributes inherited from the parent logger if they share the same keys. This allows you to override or add context-specific attributes for each log entry. + +For example, running this: + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger(); + +const child = log.child({ requestId: '123-456' }, '[child] '); + +child.info('Hello World!'); +child.info({ requestId: 'overwritten attribute' }); + +const nestedChild = child.child({ traceId: '789-012' }, '[nestedChild] '); + +nestedChild.info('Hello Deep Down!'); +``` + +Will output: + + +```sh +2025-04-10T14:00:00.000Z INF [child] Hello World! + requestId: "123-456" +2025-04-10T14:00:00.000Z INF [child] + requestId: "overwritten attribute" +2025-04-20T18:39:30.291Z INF [child] [nestedChild] Hello Deep Down! + requestId: "123-456" + traceId: "789-012" +``` + + +## Writers + +Logger writers are responsible for handling how and where log messages are output. In Hive Logger, writers are pluggable components that receive structured log data and determine its final destination and format. This allows you to easily customize logging behavior, such as printing logs to the console, writing them as JSON, storing them in memory for testing, or sending them to external systems. + +By default, Hive Logger provides several built-in writers, but you can also implement your own to suit your application's needs. The built-ins are: + +### `MemoryLogWriter` + +Writes the logs to memory allowing you to access the logs. Mostly useful for testing. 
+ +```ts +import { Logger, MemoryLogWriter } from '@graphql-hive/logger'; + +const writer = new MemoryLogWriter(); + +const log = new Logger({ writers: [writer] }); + +log.info({ my: 'attrs' }, 'Hello World!'); + +console.log(writer.logs); +``` + +Outputs: + +```sh +[ { level: 'info', msg: 'Hello World!', attrs: { my: 'attrs' } } ] +``` + +### `ConsoleLogWriter` (default) + +The default log writer used by the Hive Logger. It outputs log messages to the console in a human-friendly, colorized format, making it easy to distinguish log levels and read structured attributes. Each log entry includes a timestamp, the log level (with color), the message, and any additional attributes (with colored keys), which are pretty-printed and formatted for clarity. + +The writer works in both Node.js and browser-like environments, automatically disabling colors if not supported. This makes `ConsoleLogWriter` ideal for all cases, providing clear and readable logs out of the box. + +```ts +import { ConsoleLogWriter, Logger } from '@graphql-hive/logger'; + +const writer = new ConsoleLogWriter({ + noColor: true, // defaults to env.NO_COLOR. read more: https://no-color.org/ + noTimestamp: true, +}); + +const log = new Logger({ writers: [writer] }); + +log.info({ my: 'attrs' }, 'Hello World!'); +``` + +Outputs: + + +```sh +INF Hello World! + my: "attrs" +``` + + +### `JSONLogWriter` + +> [!NOTE] +> Will be used when the `LOG_JSON=1` environment variable is provided. + +Built-in log writer that outputs each log entry as a structured JSON object. When used, it prints logs to the console in JSON format, including all provided attributes, the log level, message, and a timestamp. + +In the JSONLogWriter implementation, any attributes you provide with the keys `msg`, `timestamp`, or `level` will be overwritten in the final log output. This is because the writer explicitly sets these fields when constructing the log object.
If you include these keys in your attributes, their values will be replaced by the logger's own values in the JSON output. + +If the `LOG_JSON_PRETTY=1` environment variable is provided, the output will be pretty-printed for readability; otherwise, it is compact. + +This writer's format is ideal for machine parsing, log aggregation, or integrating with external logging systems, especially useful for production environments or when logs need to be consumed by other tools. + +```ts +import { JSONLogWriter, Logger } from '@graphql-hive/logger'; + +const log = new Logger({ writers: [new JSONLogWriter()] }); + +log.info({ my: 'attrs' }, 'Hello World!'); +``` + +Outputs: + + +```sh +{"my":"attrs","level":"info","msg":"Hello World!","timestamp":"2025-04-10T14:00:00.000Z"} +``` + + +Or pretty printed: + + +```sh +$ LOG_JSON_PRETTY=1 node example.js + +{ + "my": "attrs", + "level": "info", + "msg": "Hello World!", + "timestamp": "2025-04-10T14:00:00.000Z" +} +``` + + +### Optional Writers + +Hive Logger includes some writers for common loggers of the JavaScript ecosystem with optional peer dependencies. + +#### `PinoLogWriter` + +Use the [Node.js `pino` logger library](https://github.com/pinojs/pino) for writing Hive Logger's logs. + +`pino` is an optional peer dependency, so you must install it first. + +```sh +npm i pino pino-pretty +``` + +```ts +import { Logger } from '@graphql-hive/logger'; +import { PinoLogWriter } from '@graphql-hive/logger/writers/pino'; +import pino from 'pino'; + +const pinoLogger = pino({ + transport: { + target: 'pino-pretty', + }, +}); + +const log = new Logger({ writers: [new PinoLogWriter(pinoLogger)] }); + +log.info({ some: 'attributes' }, 'hello world'); +``` + + +```sh +[14:00:00.000] INFO (20744): hello world + some: "attributes" +``` + + +#### `WinstonLogWriter` + +Use the [Node.js `winston` logger library](https://github.com/winstonjs/winston) for writing Hive Logger's logs. 
+ +`winston` is an optional peer dependency, so you must install it first. + +```sh +npm i winston +``` + +```ts +import { Logger } from '@graphql-hive/logger'; +import { WinstonLogWriter } from '@graphql-hive/logger/writers/winston'; +import winston from 'winston'; + +const winstonLogger = winston.createLogger({ + transports: [new winston.transports.Console()], +}); + +const log = new Logger({ writers: [new WinstonLogWriter(winstonLogger)] }); + +log.info({ some: 'attributes' }, 'hello world'); +``` + +```sh +{"level":"info","message":"hello world","some":"attributes"} +``` + +> [!IMPORTANT] +> Winston logger does not have a "trace" log level. Hive Logger will instead use "verbose" when writing logs to Winston. + +### Custom Writers + +You can implement custom log writers for the Hive Logger by creating a class that implements the `LogWriter` interface. This interface requires a single `write` method, which receives the log level, attributes, and message. + +Your writer can perform any action, such as sending logs to a file, external service, or custom destination. + +Writers can be synchronous (returning `void`) or asynchronous (returning a `Promise`). If your writer performs asynchronous operations (like network requests or file writes), simply return a promise from the `write` method. 
+ +```ts +import { + Attributes, + ConsoleLogWriter, + Logger, + LogLevel, + LogWriter, +} from '@graphql-hive/logger'; + +class HTTPLogWriter implements LogWriter { + async write(level: LogLevel, attrs: Attributes, msg: string) { + await fetch('https://my-log-service.com', { + method: 'POST', + headers: { 'content-type': 'application/json' }, + body: JSON.stringify({ level, attrs, msg }), + }); + } +} + +const log = new Logger({ + // send logs both to the HTTP loggging service and output them to the console + writers: [new HTTPLogWriter(), new ConsoleLogWriter()], +}); + +log.info('Hello World!'); + +await log.flush(); // make sure all async writes settle +``` + +#### Flushing and Non-Blocking Logging + +The logger does not block when you log asynchronously. Instead, it tracks all pending async writes internally. When you call `log.flush()` it waits for all pending writes to finish, ensuring no logs are lost on shutdown. During normal operation, logging remains fast and non-blocking, even if some writers are async. + +This design allows you to use async writers without impacting the performance of your application or blocking the main thread. + +##### Explicit Resource Management + +The Hive Logger also supports [Explicit Resource Management](https://github.com/tc39/proposal-explicit-resource-management). This allows you to ensure that all pending asynchronous log writes are properly flushed before your application exits or when the logger is no longer needed. + +You can use the logger with `await using` (in environments that support it) to wait for all log operations to complete. This is especially useful in serverless or short-lived environments where you want to guarantee that no logs are lost due to unfinished asynchronous operations. 
+ +```ts +import { + Attributes, + ConsoleLogWriter, + Logger, + LogLevel, + LogWriter, +} from '@graphql-hive/logger'; + +class HTTPLogWriter implements LogWriter { + async write(level: LogLevel, attrs: Attributes, msg: string) { + await fetch('https://my-log-service.com', { + method: 'POST', + headers: { 'content-type': 'application/json' }, + body: JSON.stringify({ level, attrs, msg }), + }); + } +} + +{ + await using log = new Logger({ + // send logs both to the HTTP loggging service and output them to the console + writers: [new HTTPLogWriter(), new ConsoleLogWriter()], + }); + + log.info('Hello World!'); +} + +// logger went out of scope and all of the logs have been flushed +``` + +##### Handling Async Write Errors + +The Logger handles write errors for asynchronous writers by tracking all write promises. When `await log.flush()` is called (including during async disposal), it waits for all pending writes to settle. If any writes fail (i.e., their promises reject), their errors are collected and after all writes have settled, if there were any errors, an `AggregateError` is thrown containing all the individual write errors. + +```ts +import { Logger } from './Logger'; + +let i = 0; +const log = new Logger({ + writers: [ + { + async write() { + i++; + throw new Error('Write failed! #' + i); + }, + }, + ], +}); + +// no fail during logs +log.info('hello'); +log.info('world'); + +try { + await log.flush(); +} catch (e) { + // flush will fail with each individually failed writes + console.error(e); +} +``` + +Outputs: + +```sh +AggregateError: Failed to flush 2 writes + at async (/project/example.js:20:3) { + [errors]: [ + Error: Write failed! #1 + at Object.write (/project/example.js:9:15), + Error: Write failed! 
#2 + at Object.write (/project/example.js:9:15) + ] +} +``` + +## Advanced Serialization of Attributes + +Hive Logger uses advanced serialization to ensure that all attributes are logged safely and readably, even when they contain complex or circular data structures. This means you can log rich, nested objects or errors as attributes without worrying about serialization failures or unreadable logs. + +For example, the logger will serialize the error object, including its message and stack, in a safe and readable way. This advanced serialization is applied automatically to all attributes passed to log methods, child loggers, and writers. + +```ts +import { Logger } from '@graphql-hive/logger'; + +const log = new Logger(); + +class DatabaseError extends Error { + constructor(message: string) { + super(message); + this.name = 'DatabaseError'; + } +} +const dbErr = new DatabaseError('Connection failed'); +const userErr = new Error('Updating user failed', { cause: dbErr }); +const errs = new AggregateError([dbErr, userErr], 'Failed to update user'); + +log.error(errs); +``` + + +```sh +2025-04-10T14:00:00.000Z ERR + stack: "AggregateError: Failed to update user + at (/project/example.js:13:14) + at ModuleJob.run (node:internal/modules/esm/module_job:274:25) + at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26) + at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)" + message: "Failed to update user" + errors: [ + { + stack: "DatabaseError: Connection failed + at (/project/example.js:11:15) + at ModuleJob.run (node:internal/modules/esm/module_job:274:25) + at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26) + at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)" + message: "Database connection failed" + name: "DatabaseError" + class: "DatabaseError" + } + { + stack: "Error: Updating user failed + at (/project/example.js:12:17) + at ModuleJob.run 
(node:internal/modules/esm/module_job:274:25) + at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26) + at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)" + message: "Updating user failed" + cause: { + stack: "DatabaseError: Connection failed + at (/project/example.js:11:15) + at ModuleJob.run (node:internal/modules/esm/module_job:274:25) + at async onImport.tracePromise.__proto__ (node:internal/modules/esm/loader:644:26) + at async asyncRunEntryPointWithESMLoader (node:internal/modules/run_main:98:5)" + message: "Database connection failed" + name: "DatabaseError" + class: "DatabaseError" + } + name: "Error" + class: "Error" + } + ] + name: "AggregateError" + class: "AggregateError" +``` + diff --git a/packages/logger/package.json b/packages/logger/package.json new file mode 100644 index 000000000..d9203afe4 --- /dev/null +++ b/packages/logger/package.json @@ -0,0 +1,93 @@ +{ + "name": "@graphql-hive/logger", + "version": "0.0.0", + "type": "module", + "repository": { + "type": "git", + "url": "git+https://github.com/graphql-hive/gateway.git", + "directory": "packages/logger" + }, + "author": { + "email": "contact@the-guild.dev", + "name": "The Guild", + "url": "https://the-guild.dev" + }, + "license": "MIT", + "engines": { + "node": ">=18.0.0" + }, + "main": "./dist/index.js", + "types": "./dist/index.d.ts", + "exports": { + ".": { + "require": { + "types": "./dist/index.d.cts", + "default": "./dist/index.cjs" + }, + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + }, + "./request": { + "require": { + "types": "./dist/request.d.cts", + "default": "./dist/request.cjs" + }, + "import": { + "types": "./dist/request.d.ts", + "default": "./dist/request.js" + } + }, + "./writers/pino": { + "require": { + "types": "./dist/writers/pino.d.cts", + "default": "./dist/writers/pino.cjs" + }, + "import": { + "types": "./dist/writers/pino.d.ts", + "default": "./dist/writers/pino.js" + } + 
}, + "./writers/winston": { + "require": { + "types": "./dist/writers/winston.d.cts", + "default": "./dist/writers/winston.cjs" + }, + "import": { + "types": "./dist/writers/winston.d.ts", + "default": "./dist/writers/winston.js" + } + }, + "./package.json": "./package.json" + }, + "files": [ + "dist" + ], + "scripts": { + "build": "pkgroll --clean-dist", + "prepack": "yarn build" + }, + "peerDependencies": { + "pino": "^9.6.0" + }, + "peerDependenciesMeta": { + "pino": { + "optional": true + }, + "winston": { + "optional": true + } + }, + "devDependencies": { + "@types/quick-format-unescaped": "^4.0.3", + "@whatwg-node/disposablestack": "^0.0.6", + "fast-safe-stringify": "^2.1.1", + "pino": "^9.6.0", + "pkgroll": "2.11.2", + "quick-format-unescaped": "^4.0.4", + "winston": "^3.17.0" + }, + "sideEffects": false, + "dependencies.info": "all of the dependencies are in devDependencies which will bundle them into the package using pkgroll making pkgroll ultimately zero-dep with a smaller footprint because of tree-shaking" +} diff --git a/packages/logger/src/index.ts b/packages/logger/src/index.ts new file mode 100644 index 000000000..352b76440 --- /dev/null +++ b/packages/logger/src/index.ts @@ -0,0 +1,4 @@ +export * from './logger'; +export * from './writers'; +/** @deprecated Please migrate to using the './Logger' instead. 
*/ +export * from './legacyLogger'; diff --git a/packages/logger/src/legacyLogger.ts b/packages/logger/src/legacyLogger.ts new file mode 100644 index 000000000..ed25e8948 --- /dev/null +++ b/packages/logger/src/legacyLogger.ts @@ -0,0 +1,98 @@ +import { Logger, LogLevel } from './logger'; +import { shouldLog } from './utils'; + +// type comes from "@graphql-mesh/types" package, we're copying them over just to avoid including the whole package +export type LazyLoggerMessage = (() => any | any[]) | any; + +/** @deprecated Please migrate to using the {@link Logger} instead.*/ +export class LegacyLogger { + #logger: Logger; + + constructor(logger: Logger) { + this.#logger = logger; + } + + static from(logger: Logger): LegacyLogger { + return new LegacyLogger(logger); + } + + #log(level: LogLevel, ...[maybeMsgOrArg, ...restArgs]: any[]) { + if (typeof maybeMsgOrArg === 'string') { + if (restArgs.length) { + this.#logger.log(level, restArgs, maybeMsgOrArg); + } else { + this.#logger.log(level, maybeMsgOrArg); + } + } else { + if (restArgs.length) { + this.#logger.log(level, [maybeMsgOrArg, ...restArgs]); + } else { + this.#logger.log(level, maybeMsgOrArg); + } + } + } + + log(...args: any[]) { + this.#log('info', ...args); + } + + warn(...args: any[]) { + this.#log('warn', ...args); + } + + info(...args: any[]) { + this.#log('info', ...args); + } + + error(...args: any[]) { + this.#log('error', ...args); + } + + debug(...lazyArgs: LazyLoggerMessage[]) { + if (!shouldLog(this.#logger.level, 'debug')) { + // we only return early here because only debug can have lazy logs + return; + } + this.#log('debug', ...handleLazyMessage(lazyArgs)); + } + + child(name: string | Record): LegacyLogger { + name = + stringifyName(name) + + // append space if object is strigified to space out the prefix + (typeof name === 'object' ? 
' ' : ''); + if (this.#logger.prefix === name) { + return this; + } + return LegacyLogger.from(this.#logger.child(name)); + } + + addPrefix(prefix: string | Record): LegacyLogger { + prefix = stringifyName(prefix); + if (this.#logger.prefix?.includes(prefix)) { + // TODO: why do we do this? + return this; + } + return LegacyLogger.from(this.#logger.child(prefix)); + } +} + +function stringifyName(name: string | Record) { + if (typeof name === 'string' || typeof name === 'number') { + return `${name}`; + } + const names: string[] = []; + for (const [key, value] of Object.entries(name)) { + names.push(`${key}=${value}`); + } + return `${names.join(', ')}`; +} + +function handleLazyMessage(lazyArgs: LazyLoggerMessage[]) { + return lazyArgs.flat(Infinity).flatMap((arg) => { + if (typeof arg === 'function') { + return arg(); + } + return arg; + }); +} diff --git a/packages/logger/src/logger.ts b/packages/logger/src/logger.ts new file mode 100644 index 000000000..4bfe33a6e --- /dev/null +++ b/packages/logger/src/logger.ts @@ -0,0 +1,297 @@ +import { DisposableSymbols } from '@whatwg-node/disposablestack'; +import fastSafeStringify from 'fast-safe-stringify'; +import format from 'quick-format-unescaped'; +import { + Attributes, + getEnv, + isPromise, + logLevel, + MaybeLazy, + parseAttrs, + shallowMergeAttributes, + shouldLog, + truthyEnv, +} from './utils'; +import { ConsoleLogWriter, JSONLogWriter, LogWriter } from './writers'; + +export type { Attributes } from './utils'; + +export type LogLevel = 'trace' | 'debug' | 'info' | 'warn' | 'error'; + +export interface LoggerOptions { + /** + * The minimum log level to log. + * + * Providing `false` will disable all logging. + * + * Provided function will always be invoked to get the current log level. + * + * @default env.LOG_LEVEL || env.DEBUG ? 'debug' : 'info' + */ + level?: MaybeLazy; + /** A prefix to include in every log's message. */ + prefix?: string; + /** + * The attributes to include in all logs. 
Is mainly used to pass the parent + * attributes when creating {@link Logger.child child loggers}. + */ + attrs?: Attributes; + /** + * The log writers to use when writing logs. + * + * @default env.LOG_JSON ? [new JSONLogWriter()] : [new ConsoleLogWriter()] + */ + writers?: [LogWriter, ...LogWriter[]]; +} + +export class Logger implements AsyncDisposable { + #level: MaybeLazy; + #prefix: string | undefined; + #attrs: Attributes | undefined; + #writers: [LogWriter, ...LogWriter[]]; + #pendingWrites?: Set>; + + constructor(opts: LoggerOptions = {}) { + let logLevelEnv = getEnv('LOG_LEVEL'); + if (logLevelEnv && !(logLevelEnv in logLevel)) { + throw new Error( + `Invalid LOG_LEVEL environment variable "${logLevelEnv}". Must be one of: ${[...Object.keys(logLevel), 'false'].join(', ')}`, + ); + } + this.#level = + opts.level ?? + (logLevelEnv as LogLevel) ?? + (truthyEnv('DEBUG') ? 'debug' : 'info'); + this.#prefix = opts.prefix; + this.#attrs = opts.attrs; + this.#writers = + opts.writers ?? + (truthyEnv('LOG_JSON') + ? [new JSONLogWriter()] + : [new ConsoleLogWriter()]); + } + + /** The prefix that's prepended to each log message. */ + public get prefix() { + return this.#prefix; + } + + /** + * The attributes that are added to each log. If the log itself contains + * attributes with keys existing in {@link attrs}, the log's attributes will + * override. + */ + public get attrs() { + return this.#attrs; + } + + /** The current {@link LogLevel} of the logger. You can change the level using the {@link setLevel} method. */ + public get level() { + return typeof this.#level === 'function' ? this.#level() : this.#level; + } + + /** + * Sets the new {@link LogLevel} of the logger. All subsequent logs, and {@link child child loggers} whose + * level did not change, will respect the new level. 
+ */ + public setLevel(level: MaybeLazy) { + this.#level = level; + } + + public write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + for (const w of this.#writers) { + const write$ = w.write(level, attrs, msg); + if (isPromise(write$)) { + this.#pendingWrites ??= new Set(); + this.#pendingWrites.add(write$); + write$ + .then(() => { + // we remove from pending writes only if the write was successful + this.#pendingWrites!.delete(write$); + }) + .catch((e) => { + // otherwise we keep in the pending write to throw on flush + console.error('Failed to write async log', e); + }); + } + } + } + + public flush() { + if (this.#pendingWrites?.size) { + const errs: unknown[] = []; + return Promise.allSettled( + Array.from(this.#pendingWrites).map((w) => + w.catch((err) => errs.push(err)), + ), + ).then(() => { + this.#pendingWrites!.clear(); + if (errs.length) { + throw new AggregateError( + errs, + `Failed to flush ${errs.length} writes`, + ); + } + }); + } + return; + } + + async [DisposableSymbols.asyncDispose]() { + return this.flush(); + } + + // + + public child(prefix: string): Logger; + public child(attrs: Attributes, prefix?: string): Logger; + public child(prefixOrAttrs: string | Attributes, prefix?: string): Logger { + if (typeof prefixOrAttrs === 'string') { + return new Logger({ + level: () => this.level, // inherits the parent level (yet can be changed on child only when using setLevel) + prefix: (this.#prefix || '') + prefixOrAttrs, + attrs: this.#attrs, + writers: this.#writers, + }); + } + return new Logger({ + level: () => this.level, // inherits the parent level (yet can be changed on child only when using setLevel) + prefix: (this.#prefix || '') + (prefix || '') || undefined, + attrs: shallowMergeAttributes(this.#attrs, prefixOrAttrs), + writers: this.#writers, + }); + } + + // + + public log(level: LogLevel): void; + public log(level: LogLevel, attrs: MaybeLazy): void; + public log(level: 
LogLevel, msg: string, ...interpol: unknown[]): void; + public log( + level: LogLevel, + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public log( + level: LogLevel, + maybeAttrsOrMsg?: MaybeLazy | string | null | undefined, + ...rest: unknown[] + ): void { + if (!shouldLog(this.#level, level)) { + return; + } + + let msg: string | undefined; + let attrs: MaybeLazy | undefined; + if (typeof maybeAttrsOrMsg === 'string') { + msg = maybeAttrsOrMsg; + } else if (maybeAttrsOrMsg) { + attrs = maybeAttrsOrMsg; + if (typeof rest[0] === 'string') { + // we shift because the "rest" becomes "interpol" + msg = rest.shift() as string; + } + } + + if (this.#prefix) { + msg = `${this.#prefix}${msg || ''}`.trim(); // we trim everything because maybe the "msg" is empty + } + + attrs = shallowMergeAttributes(parseAttrs(this.#attrs), parseAttrs(attrs)); + + msg = + msg && rest.length + ? format(msg, rest, { stringify: fastSafeStringify }) + : msg; + + this.write(level, attrs, msg); + if (truthyEnv('LOG_TRACE_LOGS')) { + console.trace('👆'); + } + } + + public trace(): void; + public trace(attrs: MaybeLazy): void; + public trace(msg: string, ...interpol: unknown[]): void; + public trace( + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public trace(...args: any): void { + this.log( + 'trace', + // @ts-expect-error + ...args, + ); + } + + public debug(): void; + public debug(attrs: MaybeLazy): void; + public debug(msg: string, ...interpol: unknown[]): void; + public debug( + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public debug(...args: any): void { + this.log( + 'debug', + // @ts-expect-error + ...args, + ); + } + + public info(): void; + public info(attrs: MaybeLazy): void; + public info(msg: string, ...interpol: unknown[]): void; + public info( + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public info(...args: any): void { + this.log( + 'info', + // @ts-expect-error + ...args, + 
); + } + + public warn(): void; + public warn(attrs: MaybeLazy): void; + public warn(msg: string, ...interpol: unknown[]): void; + public warn( + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public warn(...args: any): void { + this.log( + 'warn', + // @ts-expect-error + ...args, + ); + } + + public error(): void; + public error(attrs: MaybeLazy): void; + public error(msg: string, ...interpol: unknown[]): void; + public error( + attrs: MaybeLazy, + msg: string, + ...interpol: unknown[] + ): void; + public error(...args: any): void { + this.log( + 'error', + // @ts-expect-error + ...args, + ); + } +} diff --git a/packages/logger/src/request.ts b/packages/logger/src/request.ts new file mode 100644 index 000000000..43239bf70 --- /dev/null +++ b/packages/logger/src/request.ts @@ -0,0 +1,20 @@ +import { Logger } from './logger'; + +export const requestIdByRequest = new WeakMap(); + +const loggerByRequest = new WeakMap(); + +/** + * Gets the {@link Logger} of for the {@link request}. + * + * If the request does not have a logger, the provided {@link log} + * will be associated to the {@link request} and returned. 
+ */ +export function loggerForRequest(log: Logger, request: Request): Logger { + const reqLog = loggerByRequest.get(request); + if (reqLog) { + return reqLog; + } + loggerByRequest.set(request, log); + return log; +} diff --git a/packages/logger/src/utils.ts b/packages/logger/src/utils.ts new file mode 100644 index 000000000..fbfaae167 --- /dev/null +++ b/packages/logger/src/utils.ts @@ -0,0 +1,229 @@ +import { LogLevel } from './logger'; + +export type MaybeLazy = T | (() => T); + +export type AttributeValue = any; + +export type Attributes = + | AttributeValue[] + | { [key: string | number]: AttributeValue }; + +export const logLevel: { [level in LogLevel]: number } = { + trace: 0, + debug: 1, + info: 2, + warn: 3, + error: 4, +}; + +export function shouldLog( + setLevel: MaybeLazy, + loggingLevel: LogLevel, +): boolean { + setLevel = typeof setLevel === 'function' ? setLevel() : setLevel; + return ( + setLevel !== false && // logging is not disabled + logLevel[setLevel] <= logLevel[loggingLevel] // and set log level is less than or equal to logging level + ); +} + +export function logLevelToString(level: LogLevel): string { + switch (level) { + case 'trace': + return 'TRC'; + case 'debug': + return 'DBG'; + case 'info': + return 'INF'; + case 'warn': + return 'WRN'; + case 'error': + return 'ERR'; + default: + throw new Error(`Unknown log level "${level}"`); + } +} + +export function isPromise(val: unknown): val is Promise { + const obj = Object(val); + return ( + typeof obj.then === 'function' && + typeof obj.catch === 'function' && + typeof obj.finally === 'function' + ); +} + +/** Recursivelly unwrapps the lazy attributes and parses instances of classes. 
*/ +export function parseAttrs( + attrs: MaybeLazy | undefined, + functionUnwrapDepth = 0, +): Attributes | undefined { + if (functionUnwrapDepth > 3) { + throw new Error('Too much recursion while unwrapping function attributes'); + } + + if (!attrs) { + return undefined; + } + + if (typeof attrs === 'function') { + return parseAttrs(attrs(), functionUnwrapDepth + 1); + } + + if (Array.isArray(attrs)) { + return attrs.map((val) => unwrapAttrVal(val)); + } + + if (isPlainObject(attrs)) { + const unwrapped: Attributes = {}; + for (const key of Object.keys(attrs)) { + const val = attrs[key as keyof typeof attrs]; + unwrapped[key] = unwrapAttrVal(val); + } + return unwrapped; + } + + return objectifyClass(attrs); +} + +function unwrapAttrVal( + attr: AttributeValue, + visited = new WeakSet(), +): AttributeValue { + if (!attr) { + return attr; + } + + if (isPrimitive(attr)) { + return attr; + } + + if (typeof attr === 'function') { + return `[Function: ${attr.name || '(anonymous)'}]`; + } + + if (visited.has(attr)) { + return '[Circular]'; + } + visited.add(attr); + + if (Array.isArray(attr)) { + return attr.map((val) => unwrapAttrVal(val)); + } + + if (isPlainObject(attr)) { + const unwrapped: { [key: string | number]: AttributeValue } = {}; + for (const key of Object.keys(attr)) { + const val = attr[key as keyof typeof attr]; + unwrapped[key] = unwrapAttrVal(val, visited); + } + return unwrapped; + } + + // very likely an instance of something, dont unwrap it + return objectifyClass(attr); +} + +function isPrimitive(val: unknown): val is string | number | boolean { + return val !== Object(val); +} + +const nodejsCustomInspectSy = Symbol.for('nodejs.util.inspect.custom'); + +function objectifyClass(val: unknown): Record { + if ( + // simply empty + !val || + // Object.create(null) + Object(val).__proto__ == null + ) { + return {}; + } + if ( + typeof val === 'object' && + 'toJSON' in val && + typeof val.toJSON === 'function' + ) { + // if the object has a toJSON 
method, use it - always + return val.toJSON(); + } + if ( + typeof val === 'object' && + nodejsCustomInspectSy in val && + typeof val[nodejsCustomInspectSy] === 'function' + ) { + // > Custom [util.inspect.custom](depth, opts, inspect) functions typically return a string but may return a value of any type that will be formatted accordingly by util.inspect(). + return { + [nodejsCustomInspectSy.toString()]: unwrapAttrVal( + val[nodejsCustomInspectSy](Infinity, {}), + ), + class: val.constructor.name, + }; + } + const props: Record = {}; + for (const propName of Object.getOwnPropertyNames(val)) { + props[propName] = unwrapAttrVal(val[propName as keyof typeof val]); + } + for (const protoPropName of Object.getOwnPropertyNames( + Object.getPrototypeOf(val), + )) { + const propVal = val[protoPropName as keyof typeof val]; + if (typeof propVal === 'function') { + continue; + } + props[protoPropName] = unwrapAttrVal(propVal); + } + return { + ...props, + class: val.constructor.name, + }; +} + +export function getEnv(key: string): string | undefined { + return ( + globalThis.process?.env?.[key] || + // @ts-expect-error can exist in wrangler and maybe other runtimes + globalThis.env?.[key] || + // @ts-expect-error can exist in deno + globalThis.Deno?.env?.get(key) || + // @ts-expect-error could be + globalThis[key] + ); +} + +export function truthyEnv(key: string): boolean { + return ['1', 't', 'true', 'y', 'yes'].includes( + getEnv(key)?.toLowerCase() || '', + ); +} + +export function shallowMergeAttributes( + target: Attributes | undefined, + source: Attributes | undefined, +): Attributes | undefined { + switch (true) { + case Array.isArray(source) && Array.isArray(target): + // both are arrays + return [...target, ...source]; + case Array.isArray(source): + // only "source" is an array + return target ? [target, ...source] : source; + case Array.isArray(target): + // only "target" is an array + return source ? 
[...target, source] : target; + case !!(target || source): + // neither are arrays, but at least one is an object + return { ...target, ...source }; + default: + // neither are provided + return undefined; + } +} + +/** Checks whether the value is a plan object and not an instance of any other class. */ +function isPlainObject(val: unknown): val is Object { + return ( + Object(val).constructor === Object && + Object.getPrototypeOf(val) === Object.prototype + ); +} diff --git a/packages/logger/src/writers/common.ts b/packages/logger/src/writers/common.ts new file mode 100644 index 000000000..4bad07367 --- /dev/null +++ b/packages/logger/src/writers/common.ts @@ -0,0 +1,14 @@ +import fastSafeStringify from 'fast-safe-stringify'; +import { Attributes, LogLevel } from '../logger'; + +export function jsonStringify(val: unknown, pretty?: boolean): string { + return fastSafeStringify(val, undefined, pretty ? 2 : undefined); +} + +export interface LogWriter { + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void | Promise; +} diff --git a/packages/logger/src/writers/console.ts b/packages/logger/src/writers/console.ts new file mode 100644 index 000000000..cec5def1f --- /dev/null +++ b/packages/logger/src/writers/console.ts @@ -0,0 +1,124 @@ +import { LogLevel } from '../logger'; +import { Attributes, logLevelToString, truthyEnv } from '../utils'; +import { jsonStringify, LogWriter } from './common'; + +const asciMap = { + timestamp: '\x1b[90m', // bright black + trace: '\x1b[36m', // cyan + debug: '\x1b[90m', // bright black + info: '\x1b[32m', // green + warn: '\x1b[33m', // yellow + error: '\x1b[41;39m', // red; white + message: '\x1b[1m', // bold + key: '\x1b[35m', // magenta + reset: '\x1b[0m', // reset +}; + +export interface ConsoleLogWriterOptions { + /** @default globalThis.Console */ + console?: Pick; + /** + * Whether to disable colors in the console output. 
+ * + * @default env.NO_COLOR || false + */ + noColor?: boolean; + /** + * Whether to include the timestamp at the beginning of the log message. + * + * @default false + */ + noTimestamp?: boolean; +} + +export class ConsoleLogWriter implements LogWriter { + #console: NonNullable; + #noColor: boolean; + #noTimestamp: boolean; + constructor(opts: ConsoleLogWriterOptions = {}) { + const { + console = globalThis.console, + // no color if we're running in browser-like (edge) environments + noColor = typeof process === 'undefined' || + // or no color if https://no-color.org/ + truthyEnv('NO_COLOR'), + noTimestamp = false, + } = opts; + this.#console = console; + this.#noColor = noColor; + this.#noTimestamp = noTimestamp; + } + color( + style: keyof typeof asciMap, + text: T, + ): T { + if (!text) { + return text; + } + if (this.#noColor) { + return text; + } + return (asciMap[style] + text + asciMap.reset) as T; + } + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + this.#console[level === 'trace' ? 
'debug' : level]( + [ + !this.#noTimestamp && this.color('timestamp', new Date().toISOString()), + this.color(level, logLevelToString(level)), + this.color('message', msg), + attrs && this.stringifyAttrs(attrs), + ] + .filter(Boolean) + .join(' '), + ); + } + stringifyAttrs(attrs: Attributes): string { + let log = '\n'; + + for (const line of jsonStringify(attrs, true).split('\n')) { + // remove the first and last line the opening and closing brackets + if (line === '{' || line === '}' || line === '[' || line === ']') { + continue; + } + + let formattedLine = line; + + // remove the quotes from the keys and remove the opening bracket + // TODO: make sure keys with quotes are preserved + formattedLine = formattedLine.replace( + /"([^"]+)":/, + this.color('key', '$1:'), + ); + + // replace all escaped new lines with a new line and append the indentation of the line + let indentationSize = line.match(/^\s*/)?.[0]?.length || 0; + if (indentationSize) indentationSize++; + + // TODO: error stack traces will have 4 spaces of indentation, should we sanitize all 4 spaces / tabs to 2 space indentation? 
+ formattedLine = formattedLine.replaceAll( + /\\n/g, + '\n' + [...Array(indentationSize)].join(' '), + ); + + // remove the ending comma + formattedLine = formattedLine.replace(/,$/, ''); + + // color the opening and closing brackets + formattedLine = formattedLine.replace( + /(\[|\{|\]|\})$/, + this.color('key', '$1'), + ); + + log += formattedLine + '\n'; + } + + // remove last new line + log = log.slice(0, -1); + + return log; + } +} diff --git a/packages/logger/src/writers/index.ts b/packages/logger/src/writers/index.ts new file mode 100644 index 000000000..346b943f9 --- /dev/null +++ b/packages/logger/src/writers/index.ts @@ -0,0 +1,4 @@ +export * from './common'; +export * from './console'; +export * from './json'; +export * from './memory'; diff --git a/packages/logger/src/writers/json.ts b/packages/logger/src/writers/json.ts new file mode 100644 index 000000000..2f5cd6fa9 --- /dev/null +++ b/packages/logger/src/writers/json.ts @@ -0,0 +1,23 @@ +import { LogLevel } from '../logger'; +import { Attributes, truthyEnv } from '../utils'; +import { jsonStringify, LogWriter } from './common'; + +export class JSONLogWriter implements LogWriter { + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + console.log( + jsonStringify( + { + ...attrs, + level, + ...(msg ? 
{ msg } : {}), + timestamp: new Date().toISOString(), + }, + truthyEnv('LOG_JSON_PRETTY'), + ), + ); + } +} diff --git a/packages/logger/src/writers/memory.ts b/packages/logger/src/writers/memory.ts new file mode 100644 index 000000000..5808b2d54 --- /dev/null +++ b/packages/logger/src/writers/memory.ts @@ -0,0 +1,18 @@ +import { LogLevel } from '../logger'; +import { Attributes } from '../utils'; +import { LogWriter } from './common'; + +export class MemoryLogWriter implements LogWriter { + public logs: { level: LogLevel; msg?: string; attrs?: unknown }[] = []; + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + this.logs.push({ + level, + ...(msg ? { msg } : {}), + ...(attrs ? { attrs } : {}), + }); + } +} diff --git a/packages/logger/src/writers/pino.ts b/packages/logger/src/writers/pino.ts new file mode 100644 index 000000000..84ea435bb --- /dev/null +++ b/packages/logger/src/writers/pino.ts @@ -0,0 +1,18 @@ +import type { BaseLogger as PinoLogger } from 'pino'; +import { LogLevel } from '../logger'; +import { Attributes } from '../utils'; +import { LogWriter } from './common'; + +export class PinoLogWriter implements LogWriter { + #pinoLogger: PinoLogger; + constructor(pinoLogger: PinoLogger) { + this.#pinoLogger = pinoLogger; + } + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + this.#pinoLogger[level](attrs, msg || undefined); + } +} diff --git a/packages/logger/src/writers/winston.ts b/packages/logger/src/writers/winston.ts new file mode 100644 index 000000000..6a5460847 --- /dev/null +++ b/packages/logger/src/writers/winston.ts @@ -0,0 +1,22 @@ +import type { Logger as WinstonLogger } from 'winston'; +import { LogLevel } from '../logger'; +import { Attributes } from '../utils'; +import { LogWriter } from './common'; + +export class WinstonLogWriter implements LogWriter { + #winstonLogger: WinstonLogger; + 
constructor(winstonLogger: WinstonLogger) { + this.#winstonLogger = winstonLogger; + } + write( + level: LogLevel, + attrs: Attributes | null | undefined, + msg: string | null | undefined, + ): void { + if (msg) { + this.#winstonLogger[level === 'trace' ? 'verbose' : level](msg, attrs); + } else { + this.#winstonLogger[level === 'trace' ? 'verbose' : level](attrs); + } + } +} diff --git a/packages/logger/tests/__snapshots__/writers.test.ts.snap b/packages/logger/tests/__snapshots__/writers.test.ts.snap new file mode 100644 index 000000000..7b0ee8652 --- /dev/null +++ b/packages/logger/tests/__snapshots__/writers.test.ts.snap @@ -0,0 +1,22 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`ConsoleLogWriter should color levels and keys 1`] = ` +[ + ""\\u001b[36mTRC\\u001b[0m \\u001b[1mhi\\u001b[0m \\n \\u001b[35mhello:\\u001b[0m \\u001b[35m{\\u001b[0m\\n \\u001b[35mdear:\\u001b[0m \\"world\\"\\n \\u001b[35mtry:\\u001b[0m \\u001b[35m[\\u001b[0m\\n \\"num\\"\\n 1\\n 2\\n \\u001b[35m]\\u001b[0m\\n \\u001b[35m}\\u001b[0m"", + ""\\u001b[90mDBG\\u001b[0m \\u001b[1mhi\\u001b[0m \\n \\u001b[35mhello:\\u001b[0m \\u001b[35m{\\u001b[0m\\n \\u001b[35mdear:\\u001b[0m \\"world\\"\\n \\u001b[35mtry:\\u001b[0m \\u001b[35m[\\u001b[0m\\n \\"num\\"\\n 1\\n 2\\n \\u001b[35m]\\u001b[0m\\n \\u001b[35m}\\u001b[0m"", + ""\\u001b[32mINF\\u001b[0m \\u001b[1mhi\\u001b[0m \\n \\u001b[35mhello:\\u001b[0m \\u001b[35m{\\u001b[0m\\n \\u001b[35mdear:\\u001b[0m \\"world\\"\\n \\u001b[35mtry:\\u001b[0m \\u001b[35m[\\u001b[0m\\n \\"num\\"\\n 1\\n 2\\n \\u001b[35m]\\u001b[0m\\n \\u001b[35m}\\u001b[0m"", + ""\\u001b[33mWRN\\u001b[0m \\u001b[1mhi\\u001b[0m \\n \\u001b[35mhello:\\u001b[0m \\u001b[35m{\\u001b[0m\\n \\u001b[35mdear:\\u001b[0m \\"world\\"\\n \\u001b[35mtry:\\u001b[0m \\u001b[35m[\\u001b[0m\\n \\"num\\"\\n 1\\n 2\\n \\u001b[35m]\\u001b[0m\\n \\u001b[35m}\\u001b[0m"", + ""\\u001b[41;39mERR\\u001b[0m \\u001b[1mhi\\u001b[0m \\n \\u001b[35mhello:\\u001b[0m \\u001b[35m{\\u001b[0m\\n 
\\u001b[35mdear:\\u001b[0m \\"world\\"\\n \\u001b[35mtry:\\u001b[0m \\u001b[35m[\\u001b[0m\\n \\"num\\"\\n 1\\n 2\\n \\u001b[35m]\\u001b[0m\\n \\u001b[35m}\\u001b[0m"", +] +`; + +exports[`ConsoleLogWriter should pretty print the attributes 1`] = ` +[ + ""TRC obj \\n a: 1\\n b: 2"", + ""DBG arr \\n \\"a\\"\\n \\"b\\"\\n \\"c\\""", + ""INF nested \\n a: {\\n b: {\\n c: {\\n d: 1\\n }\\n }\\n }"", + ""WRN arr objs \\n {\\n a: 1\\n }\\n {\\n b: 2\\n }"", + ""ERR multlinestring \\n str: \\"a\\n b\\n c\\"\\n err: {\\n message: \\"woah!\\"\\n }"", + ""INF graphql \\n query: \\"\\n {\\n hi(howMany: 1) {\\n hello\\n world\\n }\\n }\\n \\""", +] +`; diff --git a/packages/logger/tests/legacyLogger.test.ts b/packages/logger/tests/legacyLogger.test.ts new file mode 100644 index 000000000..574074833 --- /dev/null +++ b/packages/logger/tests/legacyLogger.test.ts @@ -0,0 +1,94 @@ +import { LegacyLogger } from '@graphql-hive/logger'; +import { Logger as MeshLogger } from '@graphql-mesh/types'; +import { expect, it } from 'vitest'; +import { Logger, LoggerOptions } from '../src/logger'; +import { MemoryLogWriter } from '../src/writers'; + +// a type test making sure the LegacyLogger is compatible with the MeshLogger +export const _: MeshLogger = new LegacyLogger(null as any); + +function createTLogger(opts?: Partial) { + const writer = new MemoryLogWriter(); + return [ + LegacyLogger.from( + new Logger({ ...opts, writers: opts?.writers ? 
opts.writers : [writer] }), + ), + writer, + ] as const; +} + +it('should correctly write legacy logger logs', () => { + const [log, writer] = createTLogger(); + + log.info('hello world'); + log.info({ hello: 'world' }); + log.info('hello', { wor: 'ld' }); + log.info('hello', [{ wor: 'ld' }]); + log.info('hello', { w: 'o' }, { rl: 'd' }); + log.info('hello', 'world'); + + log.child('child ').info('hello child'); + log.child({ chi: 'ld' }).info('hello chi ld'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "hello world", + }, + { + "attrs": { + "hello": "world", + }, + "level": "info", + }, + { + "attrs": [ + { + "wor": "ld", + }, + ], + "level": "info", + "msg": "hello", + }, + { + "attrs": [ + [ + { + "wor": "ld", + }, + ], + ], + "level": "info", + "msg": "hello", + }, + { + "attrs": [ + { + "w": "o", + }, + { + "rl": "d", + }, + ], + "level": "info", + "msg": "hello", + }, + { + "attrs": [ + "world", + ], + "level": "info", + "msg": "hello", + }, + { + "level": "info", + "msg": "child hello child", + }, + { + "level": "info", + "msg": "chi=ld hello chi ld", + }, + ] + `); +}); diff --git a/packages/logger/tests/logger.bench.ts b/packages/logger/tests/logger.bench.ts new file mode 100644 index 000000000..bd4a3eefa --- /dev/null +++ b/packages/logger/tests/logger.bench.ts @@ -0,0 +1,58 @@ +import { jsonStringify, Logger } from '@graphql-hive/logger'; +import { bench, describe } from 'vitest'; + +const voidlog = new Logger({ + writers: [ + { + write() { + // void + }, + }, + ], +}); + +describe.each([ + { name: 'string' as const, value: 'hello' }, + { name: 'integer' as const, value: 7 }, + { name: 'float' as const, value: 7.77 }, + { name: 'object' as const, value: { hello: 'world' } }, +])('log formatting of $name', ({ name, value }) => { + // we switch outside of the bench to avoid the overhead of the switch + switch (name) { + case 'string': + bench('template literals', () => { + voidlog.info(`hi there ${value}`); + }); + 
bench('interpolation', () => { + voidlog.info('hi there %s', value); + }); + break; + case 'integer': + bench('template literals', () => { + voidlog.info(`hi there ${value}`); + }); + bench('interpolation', () => { + voidlog.info('hi there %i', value); + }); + break; + case 'float': + bench('template literals', () => { + voidlog.info(`hi there ${value}`); + }); + bench('interpolation', () => { + voidlog.info('hi there %d', value); + }); + break; + case 'object': + bench('template literals native stringify', () => { + voidlog.info(`hi there ${JSON.stringify(value)}`); + }); + bench('template literals logger stringify', () => { + voidlog.info(`hi there ${jsonStringify(value)}`); + }); + bench('interpolation', () => { + voidlog.info('hi there %o', value); + }); + break; + } +}); diff --git a/packages/logger/tests/logger.test.ts b/packages/logger/tests/logger.test.ts new file mode 100644 index 000000000..d98ca15c3 --- /dev/null +++ b/packages/logger/tests/logger.test.ts @@ -0,0 +1,811 @@ +import { setTimeout } from 'node:timers/promises'; +import { expect, it, vi } from 'vitest'; +import { Logger, LoggerOptions } from '../src/logger'; +import { MemoryLogWriter } from '../src/writers'; +import { stableError } from './utils'; + +function createTLogger(opts?: Partial) { + const writer = new MemoryLogWriter(); + return [ + new Logger({ ...opts, writers: opts?.writers ? 
opts.writers : [writer] }), + writer, + ] as const; +} + +it('should write logs with levels, message and attributes', () => { + const [log, writter] = createTLogger(); + + const err = stableError(new Error('Woah!')); + + log.log('info'); + log.log('info', { hello: 'world', err }, 'Hello, world!'); + log.log('info', '2nd Hello, world!'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + }, + { + "attrs": { + "err": { + "class": "Error", + "message": "Woah!", + "name": "Error", + "stack": "", + }, + "hello": "world", + }, + "level": "info", + "msg": "Hello, world!", + }, + { + "level": "info", + "msg": "2nd Hello, world!", + }, + ] + `); +}); + +it('should write logs only if level is higher than set', () => { + const [log, writter] = createTLogger({ + level: 'info', + }); + + log.trace('Trace'); + log.debug('Debug'); + log.info('Info'); + log.warn('Warn'); + log.error('Error'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "Info", + }, + { + "level": "warn", + "msg": "Warn", + }, + { + "level": "error", + "msg": "Error", + }, + ] + `); +}); + +it('should include attributes in child loggers', () => { + let [log, writter] = createTLogger(); + + log = log.child({ par: 'ent' }); + + log.info('hello'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "par": "ent", + }, + "level": "info", + "msg": "hello", + }, + ] + `); +}); + +it('should include prefix in child loggers', () => { + let [log, writter] = createTLogger(); + + log = log.child('prefix '); + + log.info('hello'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "prefix hello", + }, + ] + `); +}); + +it('should include attributes and prefix in child loggers', () => { + let [log, writter] = createTLogger(); + + log = log.child({ par: 'ent' }, 'prefix '); + + log.info('hello'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "par": "ent", + }, + "level": "info", + 
"msg": "prefix hello", + }, + ] + `); +}); + +it('should have child inherit parent log level', () => { + let [log, writter] = createTLogger({ level: 'warn' }); + + log = log.child({ par: 'ent' }); + + log.debug('no hello'); + log.info('still no hello'); + log.warn('hello'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "par": "ent", + }, + "level": "warn", + "msg": "hello", + }, + ] + `); +}); + +it('should include attributes and prefix in nested child loggers', () => { + let [log, writter] = createTLogger(); + + log = log.child({ par: 'ent' }, 'prefix '); + log = log.child({ par2: 'ent2' }, 'prefix2 '); + + log.info('hello'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "par": "ent", + "par2": "ent2", + }, + "level": "info", + "msg": "prefix prefix2 hello", + }, + ] + `); +}); + +it('should unwrap lazy attribute values', () => { + const [log, writter] = createTLogger(); + + log.info( + () => ({ + every: 'thing', + nested: { + lazy: () => 'nested lazy not unwrapped', + }, + }), + 'hello', + ); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "every": "thing", + "nested": { + "lazy": "[Function: lazy]", + }, + }, + "level": "info", + "msg": "hello", + }, + ] + `); +}); + +it('should not log lazy attributes returning nothing', () => { + const [log, writter] = createTLogger(); + + log.info(() => undefined, 'hello'); + log.info(() => null, 'wor'); + log.info(() => void 0, 'ld'); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "hello", + }, + { + "level": "info", + "msg": "wor", + }, + { + "level": "info", + "msg": "ld", + }, + ] + `); +}); + +it('should not unwrap lazy attribute values', () => { + const [log, writter] = createTLogger(); + + log.info( + { + lazy: () => 'lazy', + nested: { + lazy: () => 'nested lazy', + }, + arr: [() => '0', '1'], + }, + 'hello', + ); + + expect(writter.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "arr": [ + 
"[Function: (anonymous)]", + "1", + ], + "lazy": "[Function: lazy]", + "nested": { + "lazy": "[Function: lazy]", + }, + }, + "level": "info", + "msg": "hello", + }, + ] + `); +}); + +it('should not unwrap lazy attributes if level is not to be logged', () => { + const [log] = createTLogger({ + level: 'info', + }); + + const lazy = vi.fn(() => ({ la: 'zy' })); + log.debug(lazy, 'hello'); + + expect(lazy).not.toHaveBeenCalled(); +}); + +it('should wait for async writers on flush', async () => { + const logs: any[] = []; + const log = new Logger({ + writers: [ + { + async write(level, attrs, msg) { + await setTimeout(10); + logs.push({ level, attrs, msg }); + }, + }, + ], + }); + + log.info('hello'); + log.info('world'); + + // not flushed yet + expect(logs).toMatchInlineSnapshot(`[]`); + + await log.flush(); + + // flushed + expect(logs).toMatchInlineSnapshot(` + [ + { + "attrs": undefined, + "level": "info", + "msg": "hello", + }, + { + "attrs": undefined, + "level": "info", + "msg": "world", + }, + ] + `); +}); + +it('should handle async write errors on flush', async () => { + const origConsoleError = console.error; + console.error = vi.fn(() => { + // noop + }); + using _ = { + [Symbol.dispose]() { + console.error = origConsoleError; + }, + }; + + let i = 0; + const log = new Logger({ + writers: [ + { + async write() { + i++; + throw new Error('Write failed! #' + i); + }, + }, + ], + }); + + // no fail + log.info('hello'); + log.info('world'); + + try { + await log.flush(); + throw new Error('should not have reached here'); + } catch (e) { + expect(e).toMatchInlineSnapshot( + `[AggregateError: Failed to flush 2 writes]`, + ); + expect((e as AggregateError).errors).toMatchInlineSnapshot(` + [ + [Error: Write failed! #1], + [Error: Write failed! 
#2], + ] + `); + expect(console.error).toHaveBeenCalledTimes(2); + } +}); + +it('should wait for async writers on async dispose', async () => { + const logs: any[] = []; + + { + await using log = new Logger({ + writers: [ + { + async write(level, attrs, msg) { + await setTimeout(10); + logs.push({ level, attrs, msg }); + }, + }, + ], + }); + + log.info('hello'); + log.info('world'); + + // not flushed yet + expect(logs).toMatchInlineSnapshot(`[]`); + } + + // flushed because scope ended and async dispose was called + expect(logs).toMatchInlineSnapshot(` + [ + { + "attrs": undefined, + "level": "info", + "msg": "hello", + }, + { + "attrs": undefined, + "level": "info", + "msg": "world", + }, + ] + `); +}); + +it('should log array attributes with object child attributes', () => { + let [log, writer] = createTLogger(); + + log = log.child({ hello: 'world' }); + log.info(['hello', 'world']); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": [ + { + "hello": "world", + }, + "hello", + "world", + ], + "level": "info", + }, + ] + `); +}); + +it('should log array child attributes with object attributes', () => { + let [log, writer] = createTLogger(); + + log = log.child(['hello', 'world']); + log.info({ hello: 'world' }); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": [ + "hello", + "world", + { + "hello": "world", + }, + ], + "level": "info", + }, + ] + `); +}); + +it('should log array child attributes with array attributes', () => { + let [log, writer] = createTLogger(); + + log = log.child(['hello', 'world']); + log.info(['more', 'life']); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": [ + "hello", + "world", + "more", + "life", + ], + "level": "info", + }, + ] + `); +}); + +it('should format string', () => { + const [log, writer] = createTLogger(); + + log.info('%o hello %s', { worldly: 1 }, 'world'); + log.info({ these: { are: 'attrs' } }, '%o hello %s', { worldly: 1 }, 'world'); + log.info('hello %s %j %d %o', 
'world', { obj: true }, 4, { another: 'obj' }); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "{"worldly":1} hello world", + }, + { + "attrs": { + "these": { + "are": "attrs", + }, + }, + "level": "info", + "msg": "{"worldly":1} hello world", + }, + { + "level": "info", + "msg": "hello world {"obj":true} 4 {"another":"obj"}", + }, + ] + `); +}); + +it('should write logs with unexpected attributes', () => { + const [log, writer] = createTLogger(); + + const err = stableError(new Error('Woah!')); + + log.info(err); + + log.info([err, { denis: 'badurina' }, ['hello'], 'world']); + + class MyClass { + constructor(public someprop: string) {} + get getsomeprop() { + return this.someprop; + } + } + log.info(new MyClass('hey')); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "class": "Error", + "message": "Woah!", + "name": "Error", + "stack": "", + }, + "level": "info", + }, + { + "attrs": [ + { + "class": "Error", + "message": "Woah!", + "name": "Error", + "stack": "", + }, + { + "denis": "badurina", + }, + [ + "hello", + ], + "world", + ], + "level": "info", + }, + { + "attrs": { + "class": "MyClass", + "getsomeprop": "hey", + "someprop": "hey", + }, + "level": "info", + }, + ] + `); +}); + +it('should serialise using the toJSON method', () => { + const [log, writer] = createTLogger(); + + class ToJSON { + toJSON() { + return { hello: 'world' }; + } + } + + log.info(new ToJSON(), 'hello'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "hello": "world", + }, + "level": "info", + "msg": "hello", + }, + ] + `); +}); + +it('should serialise error causes', () => { + const [log, writer] = createTLogger(); + + const cause = stableError(new Error('Cause')); + + const err = stableError(new Error('Woah!', { cause })); + + log.info(err, 'hello'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "cause": { + "class": "Error", + "message": "Cause", + "name": "Error", + 
"stack": "", + }, + "class": "Error", + "message": "Woah!", + "name": "Error", + "stack": "", + }, + "level": "info", + "msg": "hello", + }, + ] + `); +}); + +it('should gracefully handle Object.create(null)', () => { + const [log, writer] = createTLogger(); + + class NullConst { + constructor() { + return Object.create(null); + } + } + class NullProp { + someprop = Object.create(null); + } + + log.info({ class: new NullConst() }, 'hello'); + log.info(new NullProp(), 'world'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "class": {}, + }, + "level": "info", + "msg": "hello", + }, + { + "attrs": { + "class": "NullProp", + "someprop": {}, + }, + "level": "info", + "msg": "world", + }, + ] + `); +}); + +it('should handle circular references', () => { + const [log, writer] = createTLogger(); + + const obj = { circ: null as any }; + const circ = { + hello: 'world', + obj, + }; + obj.circ = circ; + + log.info(circ, 'circular'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "hello": "world", + "obj": { + "circ": { + "hello": "world", + "obj": "[Circular]", + }, + }, + }, + "level": "info", + "msg": "circular", + }, + ] + `); +}); + +it('should serialise aggregate errors', () => { + const [log, writer] = createTLogger(); + + const err1 = stableError(new Error('Woah!')); + + const err2 = stableError(new Error('Woah2!')); + + const aggErr = stableError( + new AggregateError([err1, err2], 'Woah Aggregate!'), + ); + + log.info(aggErr, 'aggregate'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "class": "AggregateError", + "errors": [ + { + "class": "Error", + "message": "Woah!", + "name": "Error", + "stack": "", + }, + { + "class": "Error", + "message": "Woah2!", + "name": "Error", + "stack": "", + }, + ], + "message": "Woah Aggregate!", + "name": "AggregateError", + "stack": "", + }, + "level": "info", + "msg": "aggregate", + }, + ] + `); +}); + +it('should change log level', () => { + const [log, 
writer] = createTLogger(); + + log.info('hello'); + log.setLevel('warn'); + log.info('no hello'); + log.warn('yes hello'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "hello", + }, + { + "level": "warn", + "msg": "yes hello", + }, + ] + `); +}); + +it('should change root log level and propagate to child loggers', () => { + const [rootLog, writer] = createTLogger(); + + const childLog = rootLog.child('sub '); + + childLog.info('hello'); + rootLog.setLevel('warn'); + childLog.info('no hello'); + childLog.warn('yes hello'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "sub hello", + }, + { + "level": "warn", + "msg": "sub yes hello", + }, + ] + `); +}); + +it('should change child log level only on child', () => { + const [rootLog, writer] = createTLogger(); + + const childLog = rootLog.child('sub '); + + childLog.setLevel('warn'); + rootLog.info('yes hello'); // should still log because root didnt change + childLog.info('no hello'); + childLog.warn('yes hello'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "level": "info", + "msg": "yes hello", + }, + { + "level": "warn", + "msg": "sub yes hello", + }, + ] + `); +}); + +it('should log using nodejs.util.inspect.custom symbol', () => { + const [log, writer] = createTLogger(); + + class Inspect { + [Symbol.for('nodejs.util.inspect.custom')]() { + return 'Ok good'; + } + } + + log.info(new Inspect(), 'sy'); + + expect(writer.logs).toMatchInlineSnapshot(` + [ + { + "attrs": { + "Symbol(nodejs.util.inspect.custom)": "Ok good", + "class": "Inspect", + }, + "level": "info", + "msg": "sy", + }, + ] + `); +}); diff --git a/packages/logger/tests/utils.ts b/packages/logger/tests/utils.ts new file mode 100644 index 000000000..1c448beca --- /dev/null +++ b/packages/logger/tests/utils.ts @@ -0,0 +1,20 @@ +/** Stabilises the error for snapshot testing */ +export function stableError(err: T): T { + if (globalThis.Bun) { + // bun serialises 
errors differently from node + // we need to remove some properties to make the snapshots match + // @ts-expect-error + delete err.column; + // @ts-expect-error + delete err.line; + // @ts-expect-error + delete err.originalColumn; + // @ts-expect-error + delete err.originalLine; + // @ts-expect-error + delete err.sourceURL; + } + // we remove the stack to make the snapshot stable + err.stack = ''; + return err; +} diff --git a/packages/logger/tests/writers.test.ts b/packages/logger/tests/writers.test.ts new file mode 100644 index 000000000..d3ae6227a --- /dev/null +++ b/packages/logger/tests/writers.test.ts @@ -0,0 +1,71 @@ +import { describe, expect, it } from 'vitest'; +import { Logger } from '../src/logger'; +import { + ConsoleLogWriter, + ConsoleLogWriterOptions, + jsonStringify, +} from '../src/writers'; + +describe('ConsoleLogWriter', () => { + function createTConsoleLogger(opts?: Partial) { + const logs: string[] = []; + const writer = new ConsoleLogWriter({ + console: { + debug: (...args: unknown[]) => { + logs.push(args.map((arg) => jsonStringify(arg)).join(' ')); + }, + info: (...args: unknown[]) => { + logs.push(args.map((arg) => jsonStringify(arg)).join(' ')); + }, + warn: (...args: unknown[]) => { + logs.push(args.map((arg) => jsonStringify(arg)).join(' ')); + }, + error: (...args: unknown[]) => { + logs.push(args.map((arg) => jsonStringify(arg)).join(' ')); + }, + }, + noTimestamp: true, + noColor: true, + ...opts, + }); + return [new Logger({ level: 'trace', writers: [writer] }), logs] as const; + } + + it('should pretty print the attributes', () => { + const [log, logs] = createTConsoleLogger(); + + log.trace({ a: 1, b: 2 }, 'obj'); + log.debug(['a', 'b', 'c'], 'arr'); + log.info({ a: { b: { c: { d: 1 } } } }, 'nested'); + log.warn([{ a: 1 }, { b: 2 }], 'arr objs'); + log.error({ str: 'a\nb\nc', err: { message: 'woah!' 
} }, 'multlinestring'); + + log.info( + { + query: ` +{ + hi(howMany: 1) { + hello + world + } +} +`, + }, + 'graphql', + ); + + expect(logs).toMatchSnapshot(); + }); + + it('should color levels and keys', () => { + const [log, logs] = createTConsoleLogger({ noColor: false }); + + log.trace({ hello: { dear: 'world', try: ['num', 1, 2] } }, 'hi'); + log.debug({ hello: { dear: 'world', try: ['num', 1, 2] } }, 'hi'); + log.info({ hello: { dear: 'world', try: ['num', 1, 2] } }, 'hi'); + log.warn({ hello: { dear: 'world', try: ['num', 1, 2] } }, 'hi'); + log.error({ hello: { dear: 'world', try: ['num', 1, 2] } }, 'hi'); + + expect(logs).toMatchSnapshot(); + }); +}); diff --git a/packages/nestjs/src/index.ts b/packages/nestjs/src/index.ts index e4f14f710..44e5be704 100644 --- a/packages/nestjs/src/index.ts +++ b/packages/nestjs/src/index.ts @@ -1,5 +1,6 @@ import { createGatewayRuntime, + createLoggerFromLogging, GatewayCLIBuiltinPluginConfig, GatewayConfigProxy, GatewayConfigSubgraph, @@ -11,10 +12,7 @@ import { PubSub, type GatewayRuntime, } from '@graphql-hive/gateway'; -import { - Logger as GatewayLogger, - type LazyLoggerMessage, -} from '@graphql-mesh/types'; +import { Logger as HiveLogger } from '@graphql-hive/logger'; import { asArray, type IResolvers, @@ -63,6 +61,7 @@ export class HiveGatewayDriver< private async ensureGatewayRuntime({ typeDefs, resolvers, + logging, ...options }: HiveGatewayDriverConfig) { if (this._gatewayRuntime) { @@ -78,14 +77,34 @@ export class HiveGatewayDriver< if (resolvers) { additionalResolvers.push(...asArray(resolvers)); } - const logger = new NestJSLoggerAdapter( - 'Hive Gateway', - {}, - new NestLogger('Hive Gateway'), - options.debug ?? 
truthy(process.env['DEBUG']), - ); + + let log: HiveLogger; + if (logging != null) { + log = createLoggerFromLogging(logging); + } else { + const nestLog = new NestLogger('Hive Gateway'); + log = new HiveLogger({ + writers: [ + { + write(level, attrs, msg) { + switch (level) { + case 'trace': + nestLog.verbose(msg, attrs); + break; + case 'info': + nestLog.log(msg, attrs); + break; + default: + nestLog[level](msg, attrs); + } + }, + }, + ], + }); + } + const configCtx = { - logger, + log, cwd: process.cwd(), pubsub: options.pubsub || new PubSub(), }; @@ -96,7 +115,7 @@ export class HiveGatewayDriver< }); this._gatewayRuntime = createGatewayRuntime({ ...options, - logging: configCtx.logger, + logging: configCtx.log, cache, graphqlEndpoint: options.path, additionalTypeDefs, @@ -287,100 +306,3 @@ export class HiveGatewayDriver< }); } } - -class NestJSLoggerAdapter implements GatewayLogger { - constructor( - public name: string, - private meta: Record, - private logger: NestLogger, - private isDebug: boolean, - ) {} - private prepareMessage(args: LazyLoggerMessage[]) { - const obj = { - ...(this.meta || {}), - }; - const strs: string[] = []; - const flattenedArgs = args - .flatMap((arg) => (typeof arg === 'function' ? 
arg() : arg)) - .flat(Number.POSITIVE_INFINITY); - for (const arg of flattenedArgs) { - if (typeof arg === 'string' || typeof arg === 'number') { - strs.push(arg.toString()); - } else { - Object.assign(obj, arg); - } - } - return { obj, str: strs.join(', ') }; - } - log(...args: any[]) { - const { obj, str } = this.prepareMessage(args); - if (Object.keys(obj).length) { - this.logger.log(obj, str); - } else { - this.logger.log(str); - } - } - info(...args: any[]) { - const { obj, str } = this.prepareMessage(args); - if (Object.keys(obj).length) { - this.logger.log(obj, str); - } else { - this.logger.log(str); - } - } - error(...args: any[]) { - const { obj, str } = this.prepareMessage(args); - if (Object.keys(obj).length) { - this.logger.error(obj, str); - } else { - this.logger.error(str); - } - } - warn(...args: any[]) { - const { obj, str } = this.prepareMessage(args); - if (Object.keys(obj).length) { - this.logger.warn(obj, str); - } else { - this.logger.warn(str); - } - } - debug(...args: any[]) { - if (!this.isDebug) { - return; - } - const { obj, str } = this.prepareMessage(args); - if (Object.keys(obj).length) { - this.logger.debug(obj, str); - } else { - this.logger.debug(str); - } - } - child( - newNameOrMeta: string | Record, - ): NestJSLoggerAdapter { - const newName = - typeof newNameOrMeta === 'string' - ? this.name - ? `${this.name}, ${newNameOrMeta}` - : newNameOrMeta - : this.name; - const newMeta = - typeof newNameOrMeta === 'string' - ? 
this.meta - : { ...this.meta, ...newNameOrMeta }; - return new NestJSLoggerAdapter( - newName, - newMeta, - new NestLogger(newName), - this.isDebug, - ); - } -} - -function truthy(val: unknown) { - return ( - val === true || - val === 1 || - ['1', 't', 'true', 'y', 'yes'].includes(String(val)) - ); -} diff --git a/packages/plugins/hmac-upstream-signature/src/index.ts b/packages/plugins/hmac-upstream-signature/src/index.ts index 3682c43c5..4bec1c30d 100644 --- a/packages/plugins/hmac-upstream-signature/src/index.ts +++ b/packages/plugins/hmac-upstream-signature/src/index.ts @@ -1,4 +1,6 @@ import type { GatewayPlugin } from '@graphql-hive/gateway-runtime'; +import type { Logger } from '@graphql-hive/logger'; +import { loggerForRequest } from '@graphql-hive/logger/request'; import type { OnSubgraphExecutePayload } from '@graphql-mesh/fusion-runtime'; import { serializeExecutionRequest } from '@graphql-tools/executor-common'; import type { ExecutionRequest } from '@graphql-tools/utils'; @@ -9,7 +11,6 @@ import { import type { FetchAPI, GraphQLParams, - YogaLogger, Plugin as YogaPlugin, } from 'graphql-yoga'; import jsonStableStringify from 'json-stable-stringify'; @@ -87,23 +88,22 @@ export function useHmacUpstreamSignature( let key$: MaybePromise; let fetchAPI: FetchAPI; let textEncoder: TextEncoder; - let yogaLogger: YogaLogger; return { onYogaInit({ yoga }) { fetchAPI = yoga.fetchAPI; - yogaLogger = yoga.logger; }, onSubgraphExecute({ subgraphName, subgraph, executionRequest, - logger = yogaLogger, + log: rootLog, }) { - logger?.debug(`running shouldSign for subgraph ${subgraphName}`); + const log = rootLog.child('[useHmacUpstreamSignature] '); + log.debug(`Running shouldSign for subgraph ${subgraphName}`); if (shouldSign({ subgraphName, subgraph, executionRequest })) { - logger?.debug( + log.debug( `shouldSign is true for subgraph ${subgraphName}, signing request`, ); textEncoder ||= new fetchAPI.TextEncoder(); @@ -128,8 +128,12 @@ export function 
useHmacUpstreamSignature( const extensionValue = fetchAPI.btoa( String.fromCharCode(...new Uint8Array(signature)), ); - logger?.debug( - `produced hmac signature for subgraph ${subgraphName}, signature: ${extensionValue}, signed payload: ${serializedExecutionRequest}`, + log.debug( + { + signature: extensionValue, + payload: serializedExecutionRequest, + }, + `Produced hmac signature for subgraph ${subgraphName}`, ); if (!executionRequest.extensions) { @@ -141,7 +145,7 @@ export function useHmacUpstreamSignature( }, ); } else { - logger?.debug( + log.debug( `shouldSign is false for subgraph ${subgraphName}, skipping hmac signature`, ); } @@ -150,6 +154,7 @@ export function useHmacUpstreamSignature( } export type HMACUpstreamSignatureValidationOptions = { + log: Logger; secret: string; extensionName?: string; serializeParams?: (params: GraphQLParams) => string; @@ -167,22 +172,17 @@ export function useHmacSignatureValidation( const extensionName = options.extensionName || DEFAULT_EXTENSION_NAME; let key$: MaybePromise; let textEncoder: TextEncoder; - let logger: YogaLogger; const paramsSerializer = options.serializeParams || defaultParamsSerializer; return { - onYogaInit({ yoga }) { - logger = yoga.logger; - }, - onParams({ params, fetchAPI }) { + onParams({ params, fetchAPI, request }) { + const log = loggerForRequest(options.log, request).child( + '[useHmacSignatureValidation] ', + ); textEncoder ||= new fetchAPI.TextEncoder(); const extension = params.extensions?.[extensionName]; if (!extension) { - logger.warn( - `Missing HMAC signature: extension ${extensionName} not found in request.`, - ); - throw new Error( `Missing HMAC signature: extension ${extensionName} not found in request.`, ); @@ -202,8 +202,9 @@ export function useHmacSignatureValidation( c.charCodeAt(0), ); const serializedParams = paramsSerializer(params); - logger.debug( - `HMAC signature will be calculate based on serialized params: ${serializedParams}`, + log.debug( + { serializedParams }, + 
'HMAC signature will be calculate based on serialized params', ); return handleMaybePromise( @@ -216,10 +217,9 @@ export function useHmacSignatureValidation( ), (result) => { if (!result) { - logger.error( - `HMAC signature does not match the body content. short circuit request.`, + log.error( + 'HMAC signature does not match the body content. short circuit request.', ); - throw new Error( `Invalid HMAC signature: extension ${extensionName} does not match the body content.`, ); diff --git a/packages/plugins/hmac-upstream-signature/tests/hmac-upstream-signature.spec.ts b/packages/plugins/hmac-upstream-signature/tests/hmac-upstream-signature.spec.ts index 2100ecfcc..a14d9c586 100644 --- a/packages/plugins/hmac-upstream-signature/tests/hmac-upstream-signature.spec.ts +++ b/packages/plugins/hmac-upstream-signature/tests/hmac-upstream-signature.spec.ts @@ -4,6 +4,7 @@ import { GatewayPlugin, useCustomFetch, } from '@graphql-hive/gateway-runtime'; +import { Logger } from '@graphql-hive/logger'; import { getUnifiedGraphGracefully } from '@graphql-mesh/fusion-composition'; import { MeshFetch } from '@graphql-mesh/types'; import { GraphQLSchema, stripIgnoredCharacters } from 'graphql'; @@ -61,6 +62,7 @@ for (const [name, createConfig] of Object.entries(cases)) { ...createConfig(upstreamSchema), plugins: () => [ useHmacSignatureValidation({ + log: new Logger({ level: false }), secret: 'topSecret', }), useCustomFetch(upstream.fetch as MeshFetch), @@ -119,6 +121,7 @@ for (const [name, createConfig] of Object.entries(cases)) { schema: upstreamSchema, plugins: [ useHmacSignatureValidation({ + log: new Logger({ level: false }), secret: sharedSecret, }), ], diff --git a/packages/plugins/jwt-auth/src/index.ts b/packages/plugins/jwt-auth/src/index.ts index 001373555..351ebbb82 100644 --- a/packages/plugins/jwt-auth/src/index.ts +++ b/packages/plugins/jwt-auth/src/index.ts @@ -76,7 +76,7 @@ export function useJWT( executionRequest, subgraphName, setExecutionRequest, - logger, + log, 
}) { if (shouldForward && executionRequest.context?.jwt) { const jwtData: Partial = { @@ -86,9 +86,9 @@ export function useJWT( token: forwardToken ? executionRequest.context.jwt.token : undefined, }; - logger?.debug( - `Forwarding JWT payload to subgraph ${subgraphName}, payload: `, - jwtData.payload, + log.debug( + { payload: jwtData.payload }, + `[useJWT] Forwarding JWT payload to subgraph ${subgraphName}`, ); setExecutionRequest({ diff --git a/packages/plugins/opentelemetry/src/plugin.ts b/packages/plugins/opentelemetry/src/plugin.ts index 799883c3f..8b5977d2d 100644 --- a/packages/plugins/opentelemetry/src/plugin.ts +++ b/packages/plugins/opentelemetry/src/plugin.ts @@ -1,7 +1,7 @@ import { getRetryInfo, isRetryExecutionRequest, - type GatewayConfigContext, + Logger, type GatewayPlugin, } from '@graphql-hive/gateway-runtime'; import { getHeadersObj } from '@graphql-mesh/utils'; @@ -37,7 +37,6 @@ import { WebTracerProvider, } from '@opentelemetry/sdk-trace-web'; import { unfakePromise } from '@whatwg-node/promise-helpers'; -import { YogaLogger } from 'graphql-yoga'; import { ATTR_SERVICE_VERSION, SEMRESATTRS_SERVICE_NAME } from './attributes'; import { getContextManager, OtelContextStack } from './context'; import { @@ -278,7 +277,7 @@ export type OpenTelemetryPlugin = export function useOpenTelemetry( options: OpenTelemetryGatewayPluginOptions & { - logger?: GatewayConfigContext['logger']; + log: Logger; }, ): OpenTelemetryPlugin { const inheritContext = options.inheritContext ?? true; @@ -312,14 +311,10 @@ export function useOpenTelemetry( return specificState?.current ?? ROOT_CONTEXT; } - const yogaLogger = createDeferred(); - let pluginLogger = options.logger - ? fakePromise( - options.logger.child({ - plugin: 'OpenTelemetry', - }), - ) - : yogaLogger.promise; + const logger = createDeferred(); + let pluginLogger = options.log + ? 
fakePromise(options.log.child('[useOpenTelemetry] ')) + : logger.promise; function init(): Promise { if ('initializeNodeSDK' in options && options.initializeNodeSDK === false) { @@ -394,14 +389,14 @@ export function useOpenTelemetry( }), ); preparation$ = fakePromise(); - return pluginLogger.then((logger) => { - pluginLogger = fakePromise(logger); - logger.debug( + return pluginLogger.then((log) => { + pluginLogger = fakePromise(log); + log.debug( `context manager is ${useContextManager ? 'enabled' : 'disabled'}`, ); if (!useContextManager) { if (options.spans?.schema) { - logger.warn( + log.warn( 'Schema loading spans are disabled because no context manager is available', ); } @@ -411,15 +406,15 @@ export function useOpenTelemetry( diag.setLogger( { error: (message, ...args) => - logger.error('[otel-diag] ' + message, ...args), + log.error('[otel-diag] ' + message, ...args), warn: (message, ...args) => - logger.warn('[otel-diag] ' + message, ...args), + log.warn('[otel-diag] ' + message, ...args), info: (message, ...args) => - logger.info('[otel-diag] ' + message, ...args), + log.info('[otel-diag] ' + message, ...args), debug: (message, ...args) => - logger.debug('[otel-diag] ' + message, ...args), + log.debug('[otel-diag] ' + message, ...args), verbose: (message, ...args) => - logger.debug('[otel-diag] ' + message, ...args), + log.debug('[otel-diag] ' + message, ...args), }, options.diagLevel ?? 
DiagLogLevel.VERBOSE, ); @@ -756,7 +751,7 @@ export function useOpenTelemetry( onYogaInit({ yoga }) { yogaVersion.resolve(yoga.version); - yogaLogger.resolve(yoga.logger); + logger.resolve(options.log); }, onEnveloped({ state, extendContext }) { diff --git a/packages/plugins/opentelemetry/tests/yoga.spec.ts b/packages/plugins/opentelemetry/tests/yoga.spec.ts index 8e17c5c15..0dfe37b4f 100644 --- a/packages/plugins/opentelemetry/tests/yoga.spec.ts +++ b/packages/plugins/opentelemetry/tests/yoga.spec.ts @@ -1,3 +1,4 @@ +import { Logger } from '@graphql-hive/logger'; import { useOpenTelemetry } from '@graphql-mesh/plugin-opentelemetry'; import { createSchema, createYoga, Plugin as YogaPlugin } from 'graphql-yoga'; import { beforeEach, describe, expect, it, vi } from 'vitest'; @@ -34,6 +35,7 @@ describe('useOpenTelemetry', () => { const otelPlugin = useOpenTelemetry({ initializeNodeSDK: false, contextManager, + log: new Logger({ level: false }), }); const yoga = createYoga({ diff --git a/packages/plugins/prometheus/package.json b/packages/plugins/prometheus/package.json index 0a0239229..036bb1d7a 100644 --- a/packages/plugins/prometheus/package.json +++ b/packages/plugins/prometheus/package.json @@ -44,6 +44,7 @@ }, "dependencies": { "@graphql-hive/gateway-runtime": "workspace:^", + "@graphql-hive/logger": "workspace:^", "@graphql-mesh/cross-helpers": "^0.4.10", "@graphql-mesh/types": "^0.104.0", "@graphql-mesh/utils": "^0.104.2", diff --git a/packages/plugins/prometheus/src/index.ts b/packages/plugins/prometheus/src/index.ts index 00e2aa24b..3918ac141 100644 --- a/packages/plugins/prometheus/src/index.ts +++ b/packages/plugins/prometheus/src/index.ts @@ -1,12 +1,11 @@ -import { type GatewayPlugin } from '@graphql-hive/gateway-runtime'; +import type { GatewayPlugin, OnFetchHook } from '@graphql-hive/gateway-runtime'; +import type { Logger } from '@graphql-hive/logger'; import type { OnSubgraphExecuteHook } from '@graphql-mesh/fusion-runtime'; import type { 
TransportEntry } from '@graphql-mesh/transport-common'; import type { ImportFn, - Logger, MeshFetchRequestInit, MeshPlugin, - OnFetchHook, } from '@graphql-mesh/types'; import { defaultImportFn, @@ -140,12 +139,11 @@ type MeshMetricsConfig = { */ fetchResponseHeaders?: boolean | string[]; }; - /** * The logger instance used by the plugin to log messages. * This should be the logger instance provided by Mesh in the plugins context. */ - logger: Logger; + log: Logger; }; export type PrometheusPluginOptions = PrometheusTracingPluginConfig & @@ -369,7 +367,7 @@ export default function useMeshPrometheus( } function registryFromYamlConfig( - config: YamlConfig & { logger: Logger }, + config: YamlConfig & { log: Logger }, ): Registry { if (!config.registry) { throw new Error('Registry not defined in the YAML config'); @@ -393,7 +391,9 @@ function registryFromYamlConfig( registry$ .then(() => registryProxy.revoke()) - .catch((e) => config.logger.error(e)); + .catch((e) => + config.log.error(e, '[usePrometheus] Failed to load Prometheus registry'), + ); return registryProxy.proxy; } diff --git a/packages/runtime/package.json b/packages/runtime/package.json index 719daddba..c2e6ac18d 100644 --- a/packages/runtime/package.json +++ b/packages/runtime/package.json @@ -47,8 +47,9 @@ "@envelop/core": "^5.2.3", "@envelop/disable-introspection": "^7.0.0", "@envelop/generic-auth": "^9.0.0", + "@envelop/instrumentation": "^1.0.0", "@graphql-hive/core": "^0.12.0", - "@graphql-hive/logger-json": "workspace:^", + "@graphql-hive/logger": "workspace:^", "@graphql-hive/pubsub": "workspace:^", "@graphql-hive/signal": "workspace:^", "@graphql-hive/yoga": "^0.42.1", diff --git a/packages/runtime/src/createGatewayRuntime.ts b/packages/runtime/src/createGatewayRuntime.ts index e45a06ea6..9029a7f3e 100644 --- a/packages/runtime/src/createGatewayRuntime.ts +++ b/packages/runtime/src/createGatewayRuntime.ts @@ -9,6 +9,7 @@ import { createSchemaFetcher, createSupergraphSDLFetcher, } from 
'@graphql-hive/core'; +import { LegacyLogger } from '@graphql-hive/logger'; import type { OnDelegationPlanHook, OnDelegationStageExecuteHook, @@ -27,19 +28,13 @@ import { import { useHmacUpstreamSignature } from '@graphql-mesh/hmac-upstream-signature'; import useMeshResponseCache from '@graphql-mesh/plugin-response-cache'; import { TransportContext } from '@graphql-mesh/transport-common'; -import type { - KeyValueCache, - OnDelegateHook, - OnFetchHook, -} from '@graphql-mesh/types'; +import type { KeyValueCache, OnDelegateHook } from '@graphql-mesh/types'; import { dispose, getHeadersObj, getInContextSDK, isDisposable, isUrl, - LogLevel, - wrapFetchWithHooks, } from '@graphql-mesh/utils'; import { batchDelegateToSchema } from '@graphql-tools/batch-delegate'; import { @@ -73,6 +68,7 @@ import { isSchema, parse, } from 'graphql'; +import type { GraphiQLOptions, PromiseOrValue } from 'graphql-yoga'; import { chain, createYoga, @@ -83,9 +79,8 @@ import { type LandingPageRenderer, type YogaServerInstance, } from 'graphql-yoga'; -import type { GraphiQLOptions, PromiseOrValue } from 'graphql-yoga'; +import { createLoggerFromLogging } from './createLoggerFromLogging'; import { createGraphOSFetcher } from './fetchers/graphos'; -import { handleLoggingConfig } from './getDefaultLogger'; import { getProxyExecutor } from './getProxyExecutor'; import { getReportingPlugin } from './getReportingPlugin'; import { @@ -119,6 +114,7 @@ import type { OnCacheDeleteHook, OnCacheGetHook, OnCacheSetHook, + OnFetchHook, UnifiedGraphConfig, } from './types'; import { @@ -127,6 +123,7 @@ import { getExecuteFnFromExecutor, wrapCacheWithHooks, } from './utils'; +import { wrapFetchWithHooks } from './wrapFetchWithHooks'; // TODO: this type export is not properly accessible from graphql-yoga // "graphql-yoga/typings/plugins/use-graphiql.js" is an illegal path @@ -151,7 +148,7 @@ export function createGatewayRuntime< TContext extends Record = Record, >(config: GatewayConfig): GatewayRuntime { let 
fetchAPI = config.fetchAPI; - const logger = handleLoggingConfig(config.logging); + const log = createLoggerFromLogging(config.logging); let instrumentation: GatewayPlugin['instrumentation']; @@ -161,8 +158,8 @@ export function createGatewayRuntime< const onCacheDeleteHooks: OnCacheDeleteHook[] = []; const wrappedFetchFn = wrapFetchWithHooks( onFetchHooks, + log, () => instrumentation, - logger, ); const wrappedCache: KeyValueCache | undefined = config.cache ? wrapCacheWithHooks({ @@ -177,7 +174,7 @@ export function createGatewayRuntime< const configContext: GatewayConfigContext = { fetch: wrappedFetchFn, - logger, + log, cwd: config.cwd || (typeof process !== 'undefined' ? process.cwd() : ''), cache: wrappedCache, pubsub, @@ -217,9 +214,7 @@ export function createGatewayRuntime< persistedDocumentsPlugin = useHiveConsole({ ...configContext, enabled: false, // disables only usage reporting - logger: configContext.logger.child({ - plugin: 'Hive Persisted Documents', - }), + log: configContext.log.child('[useHiveConsole.persistedDocuments] '), experimental__persistedDocuments: { cdn: { endpoint: config.persistedDocuments.endpoint, @@ -284,7 +279,9 @@ export function createGatewayRuntime< const fetcher = createSchemaFetcher({ endpoint, key, - logger: configContext.logger.child({ source: 'Hive CDN' }), + logger: LegacyLogger.from( + configContext.log.child('[hiveSchemaFetcher] '), + ), }); schemaFetcher = function fetchSchemaFromCDN() { pausePolling(); @@ -353,7 +350,7 @@ export function createGatewayRuntime< return true; }, (err) => { - configContext.logger.warn(`Failed to introspect schema`, err); + configContext.log.warn(`Failed to introspect schema`, err); return true; }, ); @@ -477,7 +474,10 @@ export function createGatewayRuntime< const onSubgraphExecute = getOnSubgraphExecute({ onSubgraphExecuteHooks, ...(config.transports ? 
{ transports: config.transports } : {}), - transportContext: configContext, + transportContext: { + ...configContext, + logger: LegacyLogger.from(configContext.log), + }, transportEntryMap, getSubgraphSchema() { return unifiedGraph; @@ -639,7 +639,7 @@ export function createGatewayRuntime< unifiedGraph, // @ts-expect-error - Typings are wrong in legacy Mesh [subschemaConfig], - configContext.logger, + LegacyLogger.from(configContext.log), onDelegateHooks, ), ); @@ -673,7 +673,8 @@ export function createGatewayRuntime< const fetcher = createSupergraphSDLFetcher({ endpoint, key, - logger: configContext.logger.child({ source: 'Hive CDN' }), + log: configContext.log.child('[hiveSupergraphFetcher] '), + // @ts-expect-error - MeshFetch is not compatible with `typeof fetch` fetchImplementation: configContext.fetch, }); @@ -698,10 +699,10 @@ export function createGatewayRuntime< // local or remote if (!isDynamicUnifiedGraphSchema(config.supergraph)) { // no polling for static schemas - logger.debug(`Disabling polling for static supergraph`); + log.debug(`Disabling polling for static supergraph`); delete config.pollingInterval; } else if (!config.pollingInterval) { - logger.debug( + log.debug( `Polling interval not set for supergraph, if you want to get updates of supergraph, we recommend setting a polling interval`, ); } @@ -736,7 +737,10 @@ export function createGatewayRuntime< transports: config.transports, transportEntryAdditions: config.transportEntries, pollingInterval: config.pollingInterval, - transportContext: configContext, + transportContext: { + ...configContext, + logger: LegacyLogger.from(configContext.log), + }, onDelegateHooks, onSubgraphExecuteHooks, onDelegationPlanHooks, @@ -748,24 +752,22 @@ export function createGatewayRuntime< }); getSchema = () => unifiedGraphManager.getUnifiedGraph(); readinessChecker = () => { - const logger = configContext.logger.child('readiness'); - logger.debug(`checking`); + const log = configContext.log.child('[readiness] '); 
+ log.debug('checking'); return handleMaybePromise( () => unifiedGraphManager.getUnifiedGraph(), (schema) => { if (!schema) { - logger.debug( - `failed because supergraph has not been loaded yet or failed to load`, + log.debug( + 'failed because supergraph has not been loaded yet or failed to load', ); return false; } - logger.debug('passed'); + log.debug('passed'); return true; }, (err) => { - logger.error( - `failed due to errors on loading supergraph:\n${err.stack || err.message}`, - ); + log.error(err, 'loading supergraph failed due to errors'); return false; }, ); @@ -1024,7 +1026,7 @@ export function createGatewayRuntime< readinessCheckPlugin, registryPlugin, persistedDocumentsPlugin, - useRetryOnSchemaReload({ logger }), + useRetryOnSchemaReload({ log }), ]; if (config.subgraphErrors !== false) { @@ -1153,23 +1155,21 @@ export function createGatewayRuntime< let isDebug: boolean = false; - if ('level' in logger) { - if (logger.level === 'debug' || logger.level === LogLevel.debug) { - isDebug = true; - } + if (config.logging === 'debug') { + isDebug = true; } else { - logger.debug(() => { + // we use the logger's debug option because the extra plugins only add more logs + log.debug(() => { isDebug = true; - return 'Debug mode enabled'; - }); + }, 'Debug mode enabled'); } if (isDebug) { extraPlugins.push( - useSubgraphExecuteDebug(configContext), - useFetchDebug(configContext), - useDelegationPlanDebug(configContext), - useCacheDebug(configContext), + useSubgraphExecuteDebug(), + useFetchDebug(), + useDelegationPlanDebug(), + useCacheDebug({ log: configContext.log }), ); } @@ -1178,7 +1178,7 @@ export function createGatewayRuntime< schema: unifiedGraph, // @ts-expect-error MeshFetch is not compatible with YogaFetch fetchAPI: config.fetchAPI, - logging: logger, + logging: log, plugins: [ ...basePlugins, ...extraPlugins, diff --git a/packages/runtime/src/createLoggerFromLogging.ts b/packages/runtime/src/createLoggerFromLogging.ts new file mode 100644 index 
000000000..2460e956a --- /dev/null +++ b/packages/runtime/src/createLoggerFromLogging.ts @@ -0,0 +1,13 @@ +import { Logger, LogLevel } from '@graphql-hive/logger'; + +export function createLoggerFromLogging( + logging: boolean | Logger | LogLevel | undefined, +) { + if (logging == null || typeof logging === 'boolean') { + return new Logger({ level: logging === false ? false : 'info' }); + } + if (typeof logging === 'string') { + return new Logger({ level: logging }); + } + return logging; +} diff --git a/packages/runtime/src/fetchers/graphos.ts b/packages/runtime/src/fetchers/graphos.ts index 29ae7c273..8894bb939 100644 --- a/packages/runtime/src/fetchers/graphos.ts +++ b/packages/runtime/src/fetchers/graphos.ts @@ -56,8 +56,8 @@ export function createGraphOSFetcher({ graphosOpts.upLink || process.env['APOLLO_SCHEMA_CONFIG_DELIVERY_ENDPOINT']; const uplinks = uplinksParam?.split(',').map((uplink) => uplink.trim()) || DEFAULT_UPLINKS; - const graphosLogger = configContext.logger.child({ source: 'GraphOS' }); - graphosLogger.info('Using GraphOS with uplinks ', ...uplinks); + const log = configContext.log.child('[apolloGraphOSSupergraphFetcher] '); + log.info({ uplinks }, 'Using uplinks'); let supergraphLoadedPlace = defaultLoadedPlacePrefix; if (graphosOpts.graphRef) { supergraphLoadedPlace += `
${graphosOpts.graphRef}`; @@ -80,8 +80,8 @@ export function createGraphOSFetcher({ const currentTime = Date.now(); if (nextFetchTime >= currentTime) { const delay = nextFetchTime - currentTime; - graphosLogger.info( - `Fetching supergraph with delay: ${millisecondsToStr(delay)}`, + log.info( + `Fetching supergraph with delay ${millisecondsToStr(delay)}`, ); nextFetchTime = 0; return delayInMs(delay).then(fetchSupergraph); @@ -101,8 +101,8 @@ export function createGraphOSFetcher({ if (maxRetries > 1) { attemptMetadata['attempt'] = `${maxRetries - retries}/${maxRetries}`; } - const attemptLogger = graphosLogger.child(attemptMetadata); - attemptLogger.debug(`Fetching supergraph`); + const attemptLogger = log.child(attemptMetadata); + attemptLogger.debug('Fetching supergraph'); return handleMaybePromise( () => fetchSupergraphSdlFromManagedFederation({ diff --git a/packages/runtime/src/getDefaultLogger.ts b/packages/runtime/src/getDefaultLogger.ts deleted file mode 100644 index 037f779b6..000000000 --- a/packages/runtime/src/getDefaultLogger.ts +++ /dev/null @@ -1,67 +0,0 @@ -import { JSONLogger } from '@graphql-hive/logger-json'; -import { process } from '@graphql-mesh/cross-helpers'; -import { Logger } from '@graphql-mesh/types'; -import { DefaultLogger, LogLevel } from '@graphql-mesh/utils'; - -export function getDefaultLogger(opts?: { name?: string; level?: LogLevel }) { - const logFormat = process.env['LOG_FORMAT'] || (globalThis as any).LOG_FORMAT; - if (logFormat) { - if (logFormat.toLowerCase() === 'json') { - return new JSONLogger(opts); - } else if (logFormat.toLowerCase() === 'pretty') { - return new DefaultLogger(opts?.name, opts?.level); - } - } - const nodeEnv = process.env['NODE_ENV'] || (globalThis as any).NODE_ENV; - if (nodeEnv === 'production') { - return new JSONLogger(opts); - } - return new DefaultLogger(opts?.name, opts?.level); -} - -export function handleLoggingConfig( - loggingConfig: - | boolean - | Logger - | LogLevel - | keyof typeof 
LogLevel - | undefined, - existingLogger?: Logger, -) { - if (typeof loggingConfig === 'object') { - return loggingConfig; - } - if (typeof loggingConfig === 'boolean') { - if (!loggingConfig) { - if (existingLogger && 'logLevel' in existingLogger) { - existingLogger.logLevel = LogLevel.silent; - return existingLogger; - } - return getDefaultLogger({ - name: existingLogger?.name, - level: LogLevel.silent, - }); - } - } - if (typeof loggingConfig === 'number') { - if (existingLogger && 'logLevel' in existingLogger) { - existingLogger.logLevel = loggingConfig; - return existingLogger; - } - return getDefaultLogger({ - name: existingLogger?.name, - level: loggingConfig, - }); - } - if (typeof loggingConfig === 'string') { - if (existingLogger && 'logLevel' in existingLogger) { - existingLogger.logLevel = LogLevel[loggingConfig]; - return existingLogger; - } - return getDefaultLogger({ - name: existingLogger?.name, - level: LogLevel[loggingConfig], - }); - } - return existingLogger || getDefaultLogger(); -} diff --git a/packages/runtime/src/getProxyExecutor.ts b/packages/runtime/src/getProxyExecutor.ts index ec219dbf4..acbc52fad 100644 --- a/packages/runtime/src/getProxyExecutor.ts +++ b/packages/runtime/src/getProxyExecutor.ts @@ -1,3 +1,4 @@ +import { LegacyLogger } from '@graphql-hive/logger'; import type { Instrumentation, OnSubgraphExecuteHook, @@ -42,7 +43,10 @@ export function getProxyExecutor>({ return fakeTransportEntryMap[subgraphNameProp]; }, }), - transportContext: configContext, + transportContext: { + ...configContext, + logger: LegacyLogger.from(configContext.log), + }, getSubgraphSchema: getSchema, transportExecutorStack, transports: config.transports, diff --git a/packages/runtime/src/getReportingPlugin.ts b/packages/runtime/src/getReportingPlugin.ts index 367a3bed8..8540f7bde 100644 --- a/packages/runtime/src/getReportingPlugin.ts +++ b/packages/runtime/src/getReportingPlugin.ts @@ -30,7 +30,7 @@ export function getReportingPlugin>( return { name: 
'Hive', plugin: useHiveConsole({ - logger: configContext.logger.child({ reporting: 'Hive' }), + log: configContext.log.child('[useHiveConsole] '), enabled: true, ...reporting, ...(usage ? { usage } : {}), diff --git a/packages/runtime/src/handleUnifiedGraphConfig.ts b/packages/runtime/src/handleUnifiedGraphConfig.ts index 40bb3249f..fb39c7788 100644 --- a/packages/runtime/src/handleUnifiedGraphConfig.ts +++ b/packages/runtime/src/handleUnifiedGraphConfig.ts @@ -1,3 +1,4 @@ +import { LegacyLogger } from '@graphql-hive/logger'; import { UnifiedGraphManagerOptions } from '@graphql-mesh/fusion-runtime'; import { defaultImportFn, isUrl, readFileOrUrl } from '@graphql-mesh/utils'; import { defaultPrintFn } from '@graphql-tools/executor-common'; @@ -51,7 +52,7 @@ export function handleUnifiedGraphSchema( return readFileOrUrl(unifiedGraphSchema, { fetch: configContext.fetch, cwd: configContext.cwd, - logger: configContext.logger, + logger: LegacyLogger.from(configContext.log), allowUnknownExtensions: true, importFn: defaultImportFn, }); diff --git a/packages/runtime/src/index.ts b/packages/runtime/src/index.ts index b110fa61a..fe34d3970 100644 --- a/packages/runtime/src/index.ts +++ b/packages/runtime/src/index.ts @@ -1,7 +1,7 @@ export * from './createGatewayRuntime'; -export { LogLevel, DefaultLogger } from '@graphql-mesh/utils'; -export { JSONLogger } from '@graphql-hive/logger-json'; export type * from './types'; +export * from '@graphql-hive/logger'; +export * from './createLoggerFromLogging'; export * from './plugins/useCustomFetch'; export * from './plugins/useStaticFiles'; export * from './getProxyExecutor'; @@ -20,4 +20,3 @@ export { } from './plugins/useUpstreamRetry'; export { useUpstreamTimeout } from './plugins/useUpstreamTimeout'; export { getGraphQLWSOptions } from './getGraphQLWSOptions'; -export * from './getDefaultLogger'; diff --git a/packages/runtime/src/plugins/useCacheDebug.ts b/packages/runtime/src/plugins/useCacheDebug.ts index 480bf4da0..304ba8a74 
100644 --- a/packages/runtime/src/plugins/useCacheDebug.ts +++ b/packages/runtime/src/plugins/useCacheDebug.ts @@ -1,48 +1,52 @@ -import { Logger } from '@graphql-mesh/types'; +import type { Logger } from '@graphql-hive/logger'; import { GatewayPlugin } from '../types'; -export function useCacheDebug>(opts: { - logger: Logger; +export function useCacheDebug>({ + log: rootLog, +}: { + log: Logger; }): GatewayPlugin { return { + onContextBuilding({ context }) { + // onContextBuilding might not execute at all so we use the root log + rootLog = context.log; + }, onCacheGet({ key }) { + const log = rootLog.child({ key }, '[useCacheDebug] '); + log.debug('Get'); return { onCacheGetError({ error }) { - const cacheGetErrorLogger = opts.logger.child('cache-get-error'); - cacheGetErrorLogger.error({ key, error }); + log.error({ key, error }, 'Error'); }, onCacheHit({ value }) { - const cacheHitLogger = opts.logger.child('cache-hit'); - cacheHitLogger.debug({ key, value }); + log.debug({ key, value }, 'Hit'); }, onCacheMiss() { - const cacheMissLogger = opts.logger.child('cache-miss'); - cacheMissLogger.debug({ key }); + log.debug({ key }, 'Miss'); }, }; }, onCacheSet({ key, value, ttl }) { + const log = rootLog.child({ key, value, ttl }, '[useCacheDebug] '); + log.debug('Set'); return { onCacheSetError({ error }) { - const cacheSetErrorLogger = opts.logger.child('cache-set-error'); - cacheSetErrorLogger.error({ key, value, ttl, error }); + log.error({ error }, 'Error'); }, onCacheSetDone() { - const cacheSetDoneLogger = opts.logger.child('cache-set-done'); - cacheSetDoneLogger.debug({ key, value, ttl }); + log.debug('Done'); }, }; }, onCacheDelete({ key }) { + const log = rootLog.child({ key }, '[useCacheDebug] '); + log.debug('Delete'); return { onCacheDeleteError({ error }) { - const cacheDeleteErrorLogger = - opts.logger.child('cache-delete-error'); - cacheDeleteErrorLogger.error({ key, error }); + log.error({ error }, 'Error'); }, onCacheDeleteDone() { - const 
cacheDeleteDoneLogger = opts.logger.child('cache-delete-done'); - cacheDeleteDoneLogger.debug({ key }); + log.debug('Done'); }, }; }, diff --git a/packages/runtime/src/plugins/useCustomAgent.ts b/packages/runtime/src/plugins/useCustomAgent.ts index 8211b3dc6..3f61a2cb5 100644 --- a/packages/runtime/src/plugins/useCustomAgent.ts +++ b/packages/runtime/src/plugins/useCustomAgent.ts @@ -2,8 +2,11 @@ import type { Agent as HttpAgent } from 'node:http'; // eslint-disable-next-line import/no-nodejs-modules import type { Agent as HttpsAgent } from 'node:https'; -import type { OnFetchHookPayload } from '@graphql-mesh/types'; -import type { GatewayContext, GatewayPlugin } from '../types'; +import type { + GatewayContext, + GatewayPlugin, + OnFetchHookPayload, +} from '../types'; export type AgentFactory = ( payload: OnFetchHookPayload< diff --git a/packages/runtime/src/plugins/useDelegationPlanDebug.ts b/packages/runtime/src/plugins/useDelegationPlanDebug.ts index eb792a9fa..9acc94281 100644 --- a/packages/runtime/src/plugins/useDelegationPlanDebug.ts +++ b/packages/runtime/src/plugins/useDelegationPlanDebug.ts @@ -1,4 +1,3 @@ -import type { Logger } from '@graphql-mesh/types'; import { pathToArray } from '@graphql-tools/utils'; import { print } from 'graphql'; import { FetchAPI } from 'graphql-yoga'; @@ -6,7 +5,7 @@ import type { GatewayContext, GatewayPlugin } from '../types'; export function useDelegationPlanDebug< TContext extends Record, ->(opts: { logger: Logger }): GatewayPlugin { +>(): GatewayPlugin { let fetchAPI: FetchAPI; const stageExecuteLogById = new WeakMap>(); return { @@ -18,15 +17,15 @@ export function useDelegationPlanDebug< variables, fragments, fieldNodes, + context, info, - logger = opts.logger, }) { const planId = fetchAPI.crypto.randomUUID(); - const planLogger = logger.child({ planId, typeName }); - const delegationPlanStartLogger = planLogger.child( - 'delegation-plan-start', + const log = context.log.child( + { planId, typeName }, + 
'[useDelegationPlanDebug] ', ); - delegationPlanStartLogger.debug(() => { + log.debug(() => { const logObj: Record = {}; if (variables && Object.keys(variables).length) { logObj['variables'] = variables; @@ -48,19 +47,21 @@ export function useDelegationPlanDebug< logObj['path'] = pathToArray(info.path).join(' | '); } return logObj; - }); + }, 'Start'); return ({ delegationPlan }) => { - const delegationPlanDoneLogger = logger.child('delegation-plan-done'); - delegationPlanDoneLogger.debug(() => - delegationPlan.map((plan) => { - const planObj: Record = {}; - for (const [subschema, selectionSet] of plan) { - if (subschema.name) { - planObj[subschema.name] = print(selectionSet); + log.debug( + () => ({ + delegationPlan: delegationPlan.map((plan) => { + const planObj: Record = {}; + for (const [subschema, selectionSet] of plan) { + if (subschema.name) { + planObj[subschema.name] = print(selectionSet); + } } - } - return planObj; + return planObj; + }), }), + 'Done', ); }; }, @@ -72,19 +73,18 @@ export function useDelegationPlanDebug< selectionSet, key, typeName, - logger = opts.logger, }) { let contextLog = stageExecuteLogById.get(context); if (!contextLog) { contextLog = new Set(); stageExecuteLogById.set(context, contextLog); } - const log = { + const logAttr = { key: JSON.stringify(key), object: JSON.stringify(object), selectionSet: print(selectionSet), }; - const logStr = JSON.stringify(log); + const logStr = JSON.stringify(logAttr); if (contextLog.has(logStr)) { return; } @@ -94,18 +94,16 @@ export function useDelegationPlanDebug< subgraph, typeName, }; - const delegationStageLogger = logger.child(logMeta); - delegationStageLogger.debug('delegation-plan-start', () => { - return { + const log = context.log.child(logMeta, '[useDelegationPlanDebug] '); + log.debug( + () => ({ ...log, path: pathToArray(info.path).join(' | '), - }; - }); + }), + 'Stage start', + ); return ({ result }) => { - const delegationStageExecuteDoneLogger = logger.child( - 
'delegation-stage-execute-done', - ); - delegationStageExecuteDoneLogger.debug(() => result); + log.debug(() => result, 'Stage done'); }; }, }; diff --git a/packages/runtime/src/plugins/useDemandControl.ts b/packages/runtime/src/plugins/useDemandControl.ts index e55adf7e5..c7782fb34 100644 --- a/packages/runtime/src/plugins/useDemandControl.ts +++ b/packages/runtime/src/plugins/useDemandControl.ts @@ -69,8 +69,7 @@ export function useDemandControl>({ }); const costByContextMap = new WeakMap(); return { - onSubgraphExecute({ subgraph, executionRequest, logger }) { - const demandControlLogger = logger?.child('demand-control'); + onSubgraphExecute({ subgraph, executionRequest, log }) { let costByContext = executionRequest.context ? costByContextMap.get(executionRequest.context) || 0 : 0; @@ -83,10 +82,13 @@ export function useDemandControl>({ if (executionRequest.context) { costByContextMap.set(executionRequest.context, costByContext); } - demandControlLogger?.debug({ - operationCost, - totalCost: costByContext, - }); + log.debug( + { + operationCost, + totalCost: costByContext, + }, + '[useDemandControl]', + ); if (maxCost != null && costByContext > maxCost) { throw createGraphQLError( `Operation estimated cost ${costByContext} exceeded configured maximum ${maxCost}`, diff --git a/packages/runtime/src/plugins/useFetchDebug.ts b/packages/runtime/src/plugins/useFetchDebug.ts index d6a9eb207..3e87798da 100644 --- a/packages/runtime/src/plugins/useFetchDebug.ts +++ b/packages/runtime/src/plugins/useFetchDebug.ts @@ -1,38 +1,36 @@ -import type { Logger } from '@graphql-mesh/types'; import { FetchAPI } from 'graphql-yoga'; import type { GatewayPlugin } from '../types'; -export function useFetchDebug>(opts: { - logger: Logger; -}): GatewayPlugin { +export function useFetchDebug< + TContext extends Record, +>(): GatewayPlugin { let fetchAPI: FetchAPI; return { onYogaInit({ yoga }) { fetchAPI = yoga.fetchAPI; }, - onFetch({ url, options, logger = opts.logger }) { + onFetch({ 
url, options, context }) { const fetchId = fetchAPI.crypto.randomUUID(); - const fetchLogger = logger.child({ - fetchId, - }); - const httpFetchRequestLogger = fetchLogger.child('http-fetch-request'); - httpFetchRequestLogger.debug(() => ({ - url, - ...(options || {}), - body: options?.body, - headers: options?.headers, - signal: options?.signal?.aborted ? options?.signal?.reason : false, - })); + const log = context.log.child({ fetchId }, '[useFetchDebug] '); + log.debug( + () => ({ + url, + body: options?.body?.toString(), + headers: options?.headers, + signal: options?.signal?.aborted ? options?.signal?.reason : false, + }), + 'Request', + ); const start = performance.now(); return function onFetchDone({ response }) { - const httpFetchResponseLogger = fetchLogger.child( - 'http-fetch-response', + log.debug( + () => ({ + status: response.status, + headers: Object.fromEntries(response.headers.entries()), + duration: performance.now() - start, + }), + 'Response', ); - httpFetchResponseLogger.debug(() => ({ - status: response.status, - headers: Object.fromEntries(response.headers.entries()), - duration: performance.now() - start, - })); }; }, }; diff --git a/packages/runtime/src/plugins/useHiveConsole.ts b/packages/runtime/src/plugins/useHiveConsole.ts index 4f34a7c9a..6e5a0336b 100644 --- a/packages/runtime/src/plugins/useHiveConsole.ts +++ b/packages/runtime/src/plugins/useHiveConsole.ts @@ -1,7 +1,7 @@ import type { HivePluginOptions } from '@graphql-hive/core'; +import { LegacyLogger, type Logger } from '@graphql-hive/logger'; import { useHive } from '@graphql-hive/yoga'; import { process } from '@graphql-mesh/cross-helpers'; -import type { Logger } from '@graphql-mesh/types'; import { GatewayPlugin } from '../types'; export interface HiveConsolePluginOptions @@ -33,13 +33,13 @@ export default function useHiveConsole< enabled, token, ...options -}: HiveConsolePluginOptions & { logger: Logger }): GatewayPlugin< +}: HiveConsolePluginOptions & { log: Logger }): 
GatewayPlugin< TPluginContext, TContext > { const agent: HiveConsolePluginOptions['agent'] = { name: 'hive-gateway', - logger: options.logger, + logger: LegacyLogger.from(options.log), ...options.agent, }; diff --git a/packages/runtime/src/plugins/usePropagateHeaders.ts b/packages/runtime/src/plugins/usePropagateHeaders.ts index 939329a0f..fae492ab2 100644 --- a/packages/runtime/src/plugins/usePropagateHeaders.ts +++ b/packages/runtime/src/plugins/usePropagateHeaders.ts @@ -1,7 +1,6 @@ import { subgraphNameByExecutionRequest } from '@graphql-mesh/fusion-runtime'; -import type { OnFetchHookDone } from '@graphql-mesh/types'; import { handleMaybePromise } from '@whatwg-node/promise-helpers'; -import type { GatewayPlugin } from '../types'; +import type { GatewayPlugin, OnFetchHookDone } from '../types'; interface FromClientToSubgraphsPayload { request: Request; @@ -34,7 +33,10 @@ export function usePropagateHeaders>( const resHeadersByRequest = new WeakMap>(); return { onFetch({ executionRequest, context, options, setOptions }) { - const request = context?.request || executionRequest?.context?.request; + const request = + 'request' in context + ? 
context?.request || executionRequest?.context?.request + : undefined; if (request) { const subgraphName = (executionRequest && subgraphNameByExecutionRequest.get(executionRequest))!; diff --git a/packages/runtime/src/plugins/useRequestId.ts b/packages/runtime/src/plugins/useRequestId.ts index 46468012b..57ca584c2 100644 --- a/packages/runtime/src/plugins/useRequestId.ts +++ b/packages/runtime/src/plugins/useRequestId.ts @@ -1,4 +1,8 @@ -import { requestIdByRequest } from '@graphql-mesh/utils'; +import { LegacyLogger } from '@graphql-hive/logger'; +import { + loggerForRequest, + requestIdByRequest, +} from '@graphql-hive/logger/request'; import { FetchAPI } from '@whatwg-node/server'; import type { GatewayContext, GatewayPlugin } from '../types'; @@ -48,17 +52,26 @@ export function useRequestId>( }); requestIdByRequest.set(request, requestId); }, - onContextBuilding({ context }) { - if (context?.request) { - const requestId = requestIdByRequest.get(context.request); - if (requestId && context.logger) { - // @ts-expect-error - Logger is somehow read-only - context.logger = context.logger.child({ requestId }); - } + onContextBuilding({ context, extendContext }) { + // the request ID wont always be available because there's no request in websockets + const requestId = requestIdByRequest.get(context.request); + let log = context.log; + if (requestId) { + log = loggerForRequest( + context.log.child({ requestId }), + context.request, + ); } + extendContext( + // @ts-expect-error TODO: typescript is acting up here + { + log, + logger: LegacyLogger.from(log), + }, + ); }, onFetch({ context, options, setOptions }) { - if (context?.request) { + if ('request' in context) { const requestId = requestIdByRequest.get(context.request); if (requestId) { setOptions({ diff --git a/packages/runtime/src/plugins/useRetryOnSchemaReload.ts b/packages/runtime/src/plugins/useRetryOnSchemaReload.ts index f0f5060a1..8dc8c8910 100644 --- a/packages/runtime/src/plugins/useRetryOnSchemaReload.ts 
+++ b/packages/runtime/src/plugins/useRetryOnSchemaReload.ts @@ -1,5 +1,5 @@ -import { Logger } from '@graphql-mesh/types'; -import { requestIdByRequest } from '@graphql-mesh/utils'; +import type { Logger } from '@graphql-hive/logger'; +import { loggerForRequest } from '@graphql-hive/logger/request'; import type { MaybeAsyncIterable } from '@graphql-tools/utils'; import { handleMaybePromise, @@ -12,9 +12,9 @@ import type { GatewayPlugin } from '../types'; type ExecHandler = () => MaybePromise>; export function useRetryOnSchemaReload>({ - logger, + log: rootLog, }: { - logger: Logger; + log: Logger; }): GatewayPlugin { const execHandlerByContext = new WeakMap<{}, ExecHandler>(); function handleOnExecute(args: ExecutionArgs) { @@ -26,6 +26,7 @@ export function useRetryOnSchemaReload>({ } } } + const logForRequest = new WeakMap(); function handleExecutionResult({ context, result, @@ -35,20 +36,22 @@ export function useRetryOnSchemaReload>({ context: {}; result?: ExecutionResult; setResult: (result: MaybeAsyncIterable) => void; - request: Request; + // request wont be available over websockets + request: Request | undefined; }) { const execHandler = execHandlerByContext.get(context); if ( execHandler && result?.errors?.some((e) => e.extensions?.['code'] === 'SCHEMA_RELOAD') ) { - let requestLogger = logger; - const requestId = requestIdByRequest.get(request); - if (requestId) { - requestLogger = logger.child({ requestId }); - } - requestLogger.info( - 'The operation has been aborted after the supergraph schema reloaded, retrying the operation...', + const log = request + ? 
loggerForRequest( + logForRequest.get(request)!, // must exist at this point + request, + ) + : rootLog; + log.info( + '[useRetryOnSchemaReload] The operation has been aborted after the supergraph schema reloaded, retrying the operation...', ); if (execHandler) { return handleMaybePromise(execHandler, (newResult) => @@ -67,10 +70,20 @@ export function useRetryOnSchemaReload>({ }), ); }, - onExecute({ args }) { + onExecute({ args, context }) { + // we set the logger here because it most likely contains important attributes (like the request-id) + if (context.request) { + // the request wont be available over websockets + logForRequest.set(context.request, context.log); + } handleOnExecute(args); }, - onSubscribe({ args }) { + onSubscribe({ args, context }) { + // we set the logger here because it most likely contains important attributes (like the request-id) + if (context.request) { + // the request wont be available over websockets + logForRequest.set(context.request, context.log); + } handleOnExecute(args); }, onExecutionResult({ request, context, result, setResult }) { diff --git a/packages/runtime/src/plugins/useSubgraphExecuteDebug.ts b/packages/runtime/src/plugins/useSubgraphExecuteDebug.ts index d009a1e2b..135763525 100644 --- a/packages/runtime/src/plugins/useSubgraphExecuteDebug.ts +++ b/packages/runtime/src/plugins/useSubgraphExecuteDebug.ts @@ -1,24 +1,26 @@ import { defaultPrintFn } from '@graphql-mesh/transport-common'; -import type { Logger } from '@graphql-mesh/types'; import { FetchAPI, isAsyncIterable } from 'graphql-yoga'; import type { GatewayPlugin } from '../types'; export function useSubgraphExecuteDebug< TContext extends Record, ->(opts: { logger: Logger }): GatewayPlugin { +>(): GatewayPlugin { let fetchAPI: FetchAPI; return { onYogaInit({ yoga }) { fetchAPI = yoga.fetchAPI; }, - onSubgraphExecute({ executionRequest, logger = opts.logger }) { - const subgraphExecuteHookLogger = logger.child({ - subgraphExecuteId: 
fetchAPI.crypto.randomUUID(), - }); - const subgraphExecuteStartLogger = subgraphExecuteHookLogger.child( - 'subgraph-execute-start', + onSubgraphExecute({ executionRequest }) { + const log = executionRequest.context?.log.child( + { + subgraphExecuteId: fetchAPI.crypto.randomUUID(), + }, + '[useSubgraphExecuteDebug] ', ); - subgraphExecuteStartLogger.debug(() => { + if (!log) { + throw new Error('Logger is not available in the execution context'); + } + log.debug(() => { const logData: Record = {}; if (executionRequest.document) { logData['query'] = defaultPrintFn(executionRequest.document); @@ -30,28 +32,25 @@ export function useSubgraphExecuteDebug< logData['variables'] = executionRequest.variables; } return logData; - }); + }, 'Start'); const start = performance.now(); return function onSubgraphExecuteDone({ result }) { - const subgraphExecuteEndLogger = subgraphExecuteHookLogger.child( - 'subgraph-execute-end', - ); if (isAsyncIterable(result)) { return { onNext({ result }) { - const subgraphExecuteNextLogger = subgraphExecuteHookLogger.child( - 'subgraph-execute-next', - ); - subgraphExecuteNextLogger.debug(result); + log.debug(result, 'Next'); }, onEnd() { - subgraphExecuteEndLogger.debug(() => ({ - duration: performance.now() - start, - })); + log.debug( + () => ({ + duration: performance.now() - start, + }), + 'End', + ); }, }; } - subgraphExecuteEndLogger.debug(result); + log.debug(result, 'Done'); return void 0; }; }, diff --git a/packages/runtime/src/plugins/useUpstreamCancel.ts b/packages/runtime/src/plugins/useUpstreamCancel.ts index a744a2a7e..04ed514fc 100644 --- a/packages/runtime/src/plugins/useUpstreamCancel.ts +++ b/packages/runtime/src/plugins/useUpstreamCancel.ts @@ -6,7 +6,7 @@ export function useUpstreamCancel(): GatewayPlugin { return { onFetch({ context, options, executionRequest, info }) { const signals: AbortSignal[] = []; - if (context?.request?.signal) { + if ('request' in context && context.request.signal) { 
signals.push(context.request.signal); } const execRequestSignal = diff --git a/packages/runtime/src/plugins/useUpstreamRetry.ts b/packages/runtime/src/plugins/useUpstreamRetry.ts index ee0043c84..7ef434d45 100644 --- a/packages/runtime/src/plugins/useUpstreamRetry.ts +++ b/packages/runtime/src/plugins/useUpstreamRetry.ts @@ -182,11 +182,8 @@ export function useUpstreamRetry>( } } }, - onFetch({ info, executionRequest }) { - // if there's no execution request, it's a subgraph request + onFetch({ executionRequest }) { // TODO: Also consider what happens when there are multiple fetch calls for a single subgraph request - // @ts-expect-error - we know that it might have executionRequest property - executionRequest ||= info?.rootValue?.executionRequest; if (executionRequest) { return function onFetchDone({ response }) { executionRequestResponseMap.set(executionRequest, response); diff --git a/packages/runtime/src/plugins/useWebhooks.ts b/packages/runtime/src/plugins/useWebhooks.ts index 8f557e5ec..1fddbf0bd 100644 --- a/packages/runtime/src/plugins/useWebhooks.ts +++ b/packages/runtime/src/plugins/useWebhooks.ts @@ -1,16 +1,18 @@ +import type { Logger } from '@graphql-hive/logger'; import { HivePubSub } from '@graphql-hive/pubsub'; -import type { Logger } from '@graphql-mesh/types'; import { handleMaybePromise } from '@whatwg-node/promise-helpers'; -import type { Plugin } from 'graphql-yoga'; +import { GatewayPlugin } from '../types'; export interface GatewayWebhooksPluginOptions { + log: Logger; pubsub?: HivePubSub; - logger: Logger; } + export function useWebhooks({ + log: rootLog, pubsub, - logger, -}: GatewayWebhooksPluginOptions): Plugin { +}: GatewayWebhooksPluginOptions): GatewayPlugin { + const log = rootLog.child('[useWebhooks] '); if (!pubsub) { throw new Error(`You must provide a pubsub instance to webhooks feature! 
Example: @@ -32,13 +34,13 @@ export function useWebhooks({ const expectedEventName = `webhook:${requestMethod}:${pathname}`; for (const eventName of eventNames) { if (eventName === expectedEventName) { - logger?.debug(() => `Received webhook request for ${pathname}`); + log.debug({ pathname }, 'Received webhook request'); return handleMaybePromise( () => request.text(), function handleWebhookPayload(webhookPayload) { - logger?.debug( - () => - `Emitted webhook request for ${pathname}: ${webhookPayload}`, + log.debug( + { pathname, payload: webhookPayload }, + 'Emitted webhook request', ); webhookPayload = request.headers.get('content-type') === 'application/json' diff --git a/packages/runtime/src/types.ts b/packages/runtime/src/types.ts index ebda1cc81..75d110ea3 100644 --- a/packages/runtime/src/types.ts +++ b/packages/runtime/src/types.ts @@ -1,5 +1,6 @@ import type { Plugin as EnvelopPlugin } from '@envelop/core'; import type { useGenericAuth } from '@envelop/generic-auth'; +import type { Logger, LogLevel } from '@graphql-hive/logger'; import { HivePubSub } from '@graphql-hive/pubsub'; import type { Instrumentation as GatewayRuntimeInstrumentation, @@ -11,13 +12,14 @@ import type { HMACUpstreamSignatureOptions } from '@graphql-mesh/hmac-upstream-s import type { ResponseCacheConfig } from '@graphql-mesh/plugin-response-cache'; import type { KeyValueCache, - Logger, + Logger as LegacyLogger, MeshFetch, - OnFetchHook, + MeshFetchRequestInit, } from '@graphql-mesh/types'; -import type { FetchInstrumentation, LogLevel } from '@graphql-mesh/utils'; +import type { FetchInstrumentation } from '@graphql-mesh/utils'; import type { HTTPExecutorOptions } from '@graphql-tools/executor-http'; import type { + ExecutionRequest, IResolvers, MaybePromise, TypeSource, @@ -35,6 +37,7 @@ import type { Plugin as YogaPlugin, YogaServerOptions, } from 'graphql-yoga'; +import { GraphQLResolveInfo } from 'graphql/type'; import type { UnifiedGraphConfig } from 
'./handleUnifiedGraphConfig'; import type { UseContentEncodingOpts } from './plugins/useContentEncoding'; import type { AgentFactory } from './plugins/useCustomAgent'; @@ -61,9 +64,9 @@ export interface GatewayConfigContext { */ fetch: MeshFetch; /** - * The logger to use throught Mesh and it's plugins. + * The logger to use throught Hive and its plugins. */ - logger: Logger; + log: Logger; /** * Current working directory. */ @@ -96,7 +99,7 @@ export type GatewayPlugin< TContext extends Record = Record, > = YogaPlugin & GatewayContext & TContext> & UnifiedGraphPlugin & GatewayContext & TContext> & { - onFetch?: OnFetchHook & GatewayContext & TContext>; + onFetch?: OnFetchHook & TContext>; onCacheGet?: OnCacheGetHook; onCacheSet?: OnCacheSetHook; onCacheDelete?: OnCacheDeleteHook; @@ -112,6 +115,40 @@ export type GatewayPlugin< >; }; +export interface OnFetchHookPayload { + url: string; + setURL(url: URL | string): void; + options: MeshFetchRequestInit; + setOptions(options: MeshFetchRequestInit): void; + /** + * The context is not available in cases where "fetch" is done in + * order to pull a supergraph or do some internal work. + * + * The logger will be available in all cases. + */ + context: (GatewayContext & TContext) | { log: Logger }; + /** @deprecated Please use `log` from the {@link context} instead. 
*/ + logger: LegacyLogger; + info: GraphQLResolveInfo; + fetchFn: MeshFetch; + setFetchFn: (fetchFn: MeshFetch) => void; + executionRequest?: ExecutionRequest; + endResponse: (response$: MaybePromise) => void; +} + +export interface OnFetchHookDonePayload { + response: Response; + setResponse: (response: Response) => void; +} + +export type OnFetchHookDone = ( + payload: OnFetchHookDonePayload, +) => MaybePromise; + +export type OnFetchHook = ( + payload: OnFetchHookPayload, +) => MaybePromise; + export type OnCacheGetHook = ( payload: OnCacheGetHookEventPayload, ) => MaybePromise; @@ -481,9 +518,10 @@ interface GatewayConfigBase> { * Enable, disable or implement a custom logger for logging. * * @default true + * * @see https://the-guild.dev/graphql/hive/docs/gateway/logging-and-error-handling */ - logging?: boolean | Logger | LogLevel | keyof typeof LogLevel | undefined; + logging?: boolean | Logger | LogLevel | undefined; /** * Endpoint of the GraphQL API. */ diff --git a/packages/runtime/src/wrapFetchWithHooks.ts b/packages/runtime/src/wrapFetchWithHooks.ts new file mode 100644 index 000000000..a8f27249f --- /dev/null +++ b/packages/runtime/src/wrapFetchWithHooks.ts @@ -0,0 +1,105 @@ +import { getInstrumented } from '@envelop/instrumentation'; +import { LegacyLogger, type Logger } from '@graphql-hive/logger'; +import type { MeshFetch } from '@graphql-mesh/types'; +import type { ExecutionRequest, MaybePromise } from '@graphql-tools/utils'; +import { handleMaybePromise, iterateAsync } from '@whatwg-node/promise-helpers'; +import { OnFetchHook, OnFetchHookDone } from './types'; + +export type FetchInstrumentation = { + fetch?: ( + payload: { executionRequest?: ExecutionRequest }, + wrapped: () => MaybePromise, + ) => MaybePromise; +}; + +export function wrapFetchWithHooks( + onFetchHooks: OnFetchHook[], + log: Logger, + instrumentation?: () => FetchInstrumentation | undefined, +): MeshFetch { + let wrappedFetchFn = function wrappedFetchFn(url, options, context, 
info) { + let fetchFn: MeshFetch; + let response$: MaybePromise; + const onFetchDoneHooks: OnFetchHookDone[] = []; + return handleMaybePromise( + () => + iterateAsync( + onFetchHooks, + (onFetch, endEarly) => + onFetch({ + fetchFn, + setFetchFn(newFetchFn) { + fetchFn = newFetchFn; + }, + url, + setURL(newUrl) { + url = String(newUrl); + }, + // @ts-expect-error TODO: why? + options, + setOptions(newOptions) { + options = newOptions; + }, + context: { log, ...context }, + logger: LegacyLogger.from(log), + // @ts-expect-error TODO: why? + info, + get executionRequest() { + return ( + info?.executionRequest || + // @ts-expect-error might be in the root value, see packages/fusion-runtime/src/utils.ts + info?.rootValue?.executionRequest + ); + }, + endResponse(newResponse) { + response$ = newResponse; + endEarly(); + }, + }), + onFetchDoneHooks, + ), + function handleIterationResult() { + if (response$) { + return response$; + } + return handleMaybePromise( + () => fetchFn(url, options, context, info), + function (response: Response) { + return handleMaybePromise( + () => + iterateAsync(onFetchDoneHooks, (onFetchDone) => + onFetchDone({ + response, + setResponse(newResponse) { + response = newResponse; + }, + }), + ), + function handleOnFetchDone() { + return response; + }, + ); + }, + ); + }, + ); + } as MeshFetch; + + if (instrumentation) { + const originalWrappedFetch = wrappedFetchFn; + wrappedFetchFn = function wrappedFetchFn(url, options, context, info) { + const fetchInstrument = instrumentation()?.fetch; + const instrumentedFetch = fetchInstrument + ? 
getInstrumented({ + get executionRequest() { + return info?.executionRequest; + }, + }).asyncFn(fetchInstrument, originalWrappedFetch) + : originalWrappedFetch; + + return instrumentedFetch(url, options, context, info); + }; + } + + return wrappedFetchFn; +} diff --git a/packages/runtime/tests/contentEncoding.test.ts b/packages/runtime/tests/contentEncoding.test.ts index 84443e376..317455fb1 100644 --- a/packages/runtime/tests/contentEncoding.test.ts +++ b/packages/runtime/tests/contentEncoding.test.ts @@ -1,5 +1,4 @@ import { getUnifiedGraphGracefully } from '@graphql-mesh/fusion-composition'; -import type { OnFetchHookDonePayload } from '@graphql-mesh/types'; import { getSupportedEncodings, useContentEncoding } from '@whatwg-node/server'; import { createSchema, @@ -8,7 +7,11 @@ import { type YogaInitialContext, } from 'graphql-yoga'; import { afterEach, describe, expect, it, vi } from 'vitest'; -import { createGatewayRuntime, useCustomFetch } from '../src/index'; +import { + createGatewayRuntime, + OnFetchHookDonePayload, + useCustomFetch, +} from '../src/index'; describe('contentEncoding', () => { const fooResolver = vi.fn((_, __, _context: YogaInitialContext) => { diff --git a/packages/runtime/tests/graphos.test.ts b/packages/runtime/tests/graphos.test.ts index b1135db8e..2ed5143b3 100644 --- a/packages/runtime/tests/graphos.test.ts +++ b/packages/runtime/tests/graphos.test.ts @@ -1,9 +1,10 @@ import { setTimeout } from 'timers/promises'; import { - JSONLogger, type GatewayConfigContext, type GatewayGraphOSManagedFederationOptions, } from '@graphql-hive/gateway-runtime'; +import { LegacyLogger, Logger } from '@graphql-hive/logger'; +import { TransportContext } from '@graphql-mesh/transport-common'; import { Response } from '@whatwg-node/fetch'; import { beforeEach, describe, expect, it, vi } from 'vitest'; import { createGraphOSFetcher } from '../src/fetchers/graphos'; @@ -20,7 +21,7 @@ describe('GraphOS', () => { it('should fetch the supergraph SDL', async () 
=> { const { unifiedGraphFetcher } = createTestFetcher({ fetch: mockSDL }); - const result = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); expect(await result).toBe(supergraphSdl); }); @@ -37,7 +38,7 @@ describe('GraphOS', () => { }, }); - const result = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result = Promise.resolve().then(() => unifiedGraphFetcher()); for (let i = 0; i < 3; i++) { await advanceTimersByTimeAsync(1_000); } @@ -52,7 +53,7 @@ describe('GraphOS', () => { ); const result = Promise.resolve() - .then(() => unifiedGraphFetcher({})) + .then(() => unifiedGraphFetcher()) .catch((err) => err); for (let i = 0; i < 3; i++) { await advanceTimersByTimeAsync(1_000); @@ -68,7 +69,7 @@ describe('GraphOS', () => { ); const result = Promise.resolve() - .then(() => unifiedGraphFetcher({})) + .then(() => unifiedGraphFetcher()) .catch(() => {}); await advanceTimersByTimeAsync(25); expect(mockFetchError).toHaveBeenCalledTimes(1); @@ -84,12 +85,12 @@ describe('GraphOS', () => { it('should respect min-delay between polls', async () => { const { unifiedGraphFetcher } = createTestFetcher({ fetch: mockSDL }); - Promise.resolve().then(() => unifiedGraphFetcher({})); + Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(25); expect(mockSDL).toHaveBeenCalledTimes(1); await advanceTimersByTimeAsync(20); expect(mockSDL).toHaveBeenCalledTimes(1); - Promise.resolve().then(() => unifiedGraphFetcher({})); + Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(50); expect(mockSDL).toHaveBeenCalledTimes(1); await advanceTimersByTimeAsync(50); @@ -107,19 +108,19 @@ describe('GraphOS', () => { return mockSDL(); }, }); - const result1 = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result1 = Promise.resolve().then(() => unifiedGraphFetcher()); await 
advanceTimersByTimeAsync(1_000); - const result2 = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result2 = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); expect(await result1).toBe(await result2); }, 30_000); it('should not wait if min delay is superior to polling interval', async () => { const { unifiedGraphFetcher } = createTestFetcher({ fetch: mockSDL }); - const result = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); await result; - const result2 = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result2 = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); expect(await result).toBe(await result2); }); @@ -146,9 +147,9 @@ describe('GraphOS', () => { }, }); - const result = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); - const result2 = Promise.resolve().then(() => unifiedGraphFetcher({})); + const result2 = Promise.resolve().then(() => unifiedGraphFetcher()); await advanceTimersByTimeAsync(1_000); expect(await result).toBe(await result2); }); @@ -161,20 +162,10 @@ function createTestFetcher( }, opts?: Partial, ) { - return createGraphOSFetcher({ + const log = new Logger({ level: process.env['DEBUG'] ? 'debug' : false }); + const fetcher = createGraphOSFetcher({ configContext: { - logger: process.env['DEBUG'] - ? 
new JSONLogger() - : { - child() { - return this; - }, - info: () => {}, - debug: () => {}, - error: () => {}, - warn: () => {}, - log: () => {}, - }, + log, cwd: process.cwd(), ...configContext, }, @@ -186,6 +177,15 @@ function createTestFetcher( }, pollingInterval: 0.000000001, }); + return { + unifiedGraphFetcher: (transportContext: Partial = {}) => { + return fetcher.unifiedGraphFetcher({ + log, + logger: LegacyLogger.from(log), + ...transportContext, + }); + }, + }; } let supergraphSdl = 'TEST SDL'; diff --git a/packages/runtime/tests/wrapFetchWithHooks.test.ts b/packages/runtime/tests/wrapFetchWithHooks.test.ts new file mode 100644 index 000000000..145b5a880 --- /dev/null +++ b/packages/runtime/tests/wrapFetchWithHooks.test.ts @@ -0,0 +1,39 @@ +import { Logger } from '@graphql-hive/logger'; +import { createServerAdapter, Response } from '@whatwg-node/server'; +import type { GraphQLResolveInfo } from 'graphql'; +import { expect, it } from 'vitest'; +import { + wrapFetchWithHooks, + type FetchInstrumentation, +} from '../src/wrapFetchWithHooks'; + +it('should wrap fetch instrumentation', async () => { + await using adapter = createServerAdapter(() => + Response.json({ hello: 'world' }), + ); + let receivedExecutionRequest; + const fetchInstrumentation: FetchInstrumentation = { + fetch: async ({ executionRequest }, wrapped) => { + receivedExecutionRequest = executionRequest; + await wrapped(); + }, + }; + const wrappedFetch = wrapFetchWithHooks( + [ + ({ setFetchFn }) => { + setFetchFn( + // @ts-expect-error TODO: MeshFetch is not compatible with @whatwg-node/server fetch + adapter.fetch, + ); + }, + ], + new Logger({ level: false }), + () => fetchInstrumentation, + ); + const executionRequest = {}; + const res = await wrappedFetch('http://localhost:4000', {}, {}, { + executionRequest, + } as GraphQLResolveInfo); + expect(await res.json()).toEqual({ hello: 'world' }); + expect(receivedExecutionRequest).toBe(executionRequest); +}); diff --git 
a/packages/transports/common/package.json b/packages/transports/common/package.json index 6322bb0b0..61e3c0848 100644 --- a/packages/transports/common/package.json +++ b/packages/transports/common/package.json @@ -43,6 +43,7 @@ }, "dependencies": { "@envelop/core": "^5.2.3", + "@graphql-hive/logger": "workspace:^", "@graphql-hive/pubsub": "workspace:^", "@graphql-hive/signal": "workspace:^", "@graphql-mesh/types": "^0.104.0", diff --git a/packages/transports/common/src/types.ts b/packages/transports/common/src/types.ts index 529ca9828..c304d7179 100644 --- a/packages/transports/common/src/types.ts +++ b/packages/transports/common/src/types.ts @@ -1,5 +1,10 @@ +import type { Logger } from '@graphql-hive/logger'; import { HivePubSub } from '@graphql-hive/pubsub'; -import type { KeyValueCache, Logger, MeshFetch } from '@graphql-mesh/types'; +import type { + KeyValueCache, + Logger as LegacyLogger, + MeshFetch, +} from '@graphql-mesh/types'; import type { Executor, MaybePromise } from '@graphql-tools/utils'; import type { GraphQLError, GraphQLSchema } from 'graphql'; @@ -20,10 +25,14 @@ export interface TransportEntry< } export interface TransportContext { + log: Logger; + /** @deprecated Please migrate to using the {@link log}. */ + logger: LegacyLogger; + /** The fetch API to use. */ fetch?: MeshFetch; - pubsub?: HivePubSub; - logger?: Logger; + /** Will be empty when run on serverless. 
*/ cwd?: string; + pubsub?: HivePubSub; cache?: KeyValueCache; } diff --git a/packages/transports/http-callback/src/index.ts b/packages/transports/http-callback/src/index.ts index 614670d67..8c006dbbf 100644 --- a/packages/transports/http-callback/src/index.ts +++ b/packages/transports/http-callback/src/index.ts @@ -73,7 +73,7 @@ export default { transportEntry, fetch, pubsub, - logger, + log: rootLog, }): DisposableExecutor { let headersInConfig: Record | undefined; if (typeof transportEntry.headers === 'string') { @@ -106,7 +106,7 @@ export default { executionRequest: ExecutionRequest, ) { const subscriptionId = crypto.randomUUID(); - const subscriptionLogger = logger?.child({ + const log = rootLog.child({ executor: 'http-callback', subscription: subscriptionId, }); @@ -138,8 +138,9 @@ export default { stopSubscription(createTimeoutError()); }, heartbeatIntervalMs), ); - subscriptionLogger?.debug( - `Subscribing to ${transportEntry.location} with callbackUrl: ${callbackUrl}`, + log.debug( + { location: transportEntry.location, callbackUrl }, + 'Subscribing using callback', ); let pushFn: Push = () => { throw new Error( @@ -202,7 +203,7 @@ export default { } return; } - logger?.debug(`Subscription request received`, resJson); + log.debug(resJson, 'Subscription request received'); if (resJson.errors) { if (resJson.errors.length === 1 && resJson.errors[0]) { const error = resJson.errors[0]; @@ -224,7 +225,7 @@ export default { }, ), (e) => { - logger?.debug(`Subscription request failed`, e); + log.error(e, 'Subscription request failed'); stopSubscription(e); }, ); @@ -246,13 +247,13 @@ export default { pushFn = push; stopSubscription = stop; stopFnSet.add(stop); - logger?.debug(`Listening to ${subscriptionCallbackPath}`); + log.debug(`Listening to ${subscriptionCallbackPath}`); const subId = pubsub.subscribe( `webhook:post:${subscriptionCallbackPath}`, (message: HTTPCallbackMessage) => { - logger?.debug( - `Received message from ${subscriptionCallbackPath}`, + 
log.debug( message, + `Received message from ${subscriptionCallbackPath}`, ); if (message.verifier !== verifier) { return; diff --git a/packages/transports/ws/src/index.ts b/packages/transports/ws/src/index.ts index 2f937b45a..22a42fe48 100644 --- a/packages/transports/ws/src/index.ts +++ b/packages/transports/ws/src/index.ts @@ -33,7 +33,7 @@ export interface WSTransportOptions { export default { getSubgraphExecutor( - { transportEntry, logger }, + { transportEntry, log: rootLog }, /** * Do not use this option unless you know what you are doing. * @internal @@ -76,12 +76,12 @@ export default { let wsExecutor = wsExecutorMap.get(hash); if (!wsExecutor) { - const executorLogger = logger?.child({ + const log = rootLog.child({ executor: 'GraphQL WS', wsUrl, connectionParams, headers, - } as Record); + }); wsExecutor = buildGraphQLWSExecutor({ headers, url: wsUrl, @@ -91,30 +91,30 @@ export default { connectionParams, on: { connecting(isRetry) { - executorLogger?.debug('connecting', { isRetry }); + log.debug({ isRetry }, 'connecting'); }, opened(socket) { - executorLogger?.debug('opened', { socket }); + log.debug({ socket }, 'opened'); }, connected(socket, payload) { - executorLogger?.debug('connected', { socket, payload }); + log.debug({ socket, payload }, 'connected'); }, ping(received, payload) { - executorLogger?.debug('ping', { received, payload }); + log.debug({ received, payload }, 'ping'); }, pong(received, payload) { - executorLogger?.debug('pong', { received, payload }); + log.debug({ received, payload }, 'pong'); }, message(message) { - executorLogger?.debug('message', { message }); + log.debug({ message }, 'message'); }, closed(event) { - executorLogger?.debug('closed', { event }); + log.debug({ event }, 'closed'); // no subscriptions and the lazy close timeout has passed - remove the client wsExecutorMap.delete(hash); }, error(error) { - executorLogger?.debug('error', { error }); + log.debug({ error }, 'error'); }, }, onClient, diff --git 
a/packages/transports/ws/tests/ws.spec.ts b/packages/transports/ws/tests/ws.spec.ts index 0e71544ec..e9d7f0c68 100644 --- a/packages/transports/ws/tests/ws.spec.ts +++ b/packages/transports/ws/tests/ws.spec.ts @@ -1,4 +1,4 @@ -import { JSONLogger } from '@graphql-hive/logger-json'; +import { Logger } from '@graphql-hive/logger'; import type { TransportEntry, TransportGetSubgraphExecutorOptions, @@ -79,7 +79,7 @@ async function createTServer( ...transportEntry, options, }, - logger: new JSONLogger(), + log: new Logger({ level: false }), } as unknown as TransportGetSubgraphExecutorOptions, onClient, ); diff --git a/tsconfig.json b/tsconfig.json index 583838cbd..cca157be4 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -61,11 +61,14 @@ ], "@graphql-tools/wrap": ["./packages/wrap/src/index.ts"], "@graphql-tools/executor-*": ["./packages/executors/*/src/index.ts"], - "@graphql-hive/logger-json": ["./packages/logger-json/src/index.ts"], - "@graphql-hive/logger-winston": [ - "./packages/logger-winston/src/index.ts" + "@graphql-hive/logger": ["./packages/logger/src/index.ts"], + "@graphql-hive/logger/request": ["./packages/logger/src/request.ts"], + "@graphql-hive/logger/writers/pino": [ + "./packages/logger/src/writers/pino.ts" + ], + "@graphql-hive/logger/writers/winston": [ + "./packages/logger/src/writers/winston.ts" ], - "@graphql-hive/logger-pino": ["./packages/logger-pino/src/index.ts"], "@graphql-hive/plugin-aws-sigv4": [ "./packages/plugins/aws-sigv4/src/index.ts" ], diff --git a/vitest.projects.ts b/vitest.projects.ts index 4dd6d516f..33180dd9d 100644 --- a/vitest.projects.ts +++ b/vitest.projects.ts @@ -40,7 +40,11 @@ export default defineWorkspace([ hookTimeout: testTimeout, testTimeout, benchmark: { - include: ['bench/**/*.bench.ts', 'e2e/**/*.bench.ts'], + include: [ + 'bench/**/*.bench.ts', + 'e2e/**/*.bench.ts', + '**/tests/**/*.bench.ts', + ], reporters: ['verbose'], outputJson: 'bench/results.json', }, diff --git a/yarn.lock b/yarn.lock index 
d7e011c3f..18c4a37c8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3018,6 +3018,7 @@ __metadata: "@apollo/server": "npm:^4.12.2" "@apollo/subgraph": "npm:^2.11.0" "@graphql-hive/gateway": "workspace:^" + "@graphql-hive/logger": "workspace:^" "@graphql-mesh/compose-cli": "npm:^1.4.1" "@graphql-mesh/hmac-upstream-signature": "workspace:^" "@graphql-mesh/plugin-jwt-auth": "workspace:^" @@ -3941,8 +3942,9 @@ __metadata: "@envelop/core": "npm:^5.2.3" "@envelop/disable-introspection": "npm:^7.0.0" "@envelop/generic-auth": "npm:^9.0.0" + "@envelop/instrumentation": "npm:^1.0.0" "@graphql-hive/core": "npm:^0.12.0" - "@graphql-hive/logger-json": "workspace:^" + "@graphql-hive/logger": "workspace:^" "@graphql-hive/pubsub": "workspace:^" "@graphql-hive/signal": "workspace:^" "@graphql-hive/yoga": "npm:^0.42.1" @@ -4000,6 +4002,7 @@ __metadata: "@escape.tech/graphql-armor-max-tokens": "npm:^2.5.0" "@graphql-hive/gateway-runtime": "workspace:^" "@graphql-hive/importer": "workspace:^" + "@graphql-hive/logger": "workspace:^" "@graphql-hive/plugin-aws-sigv4": "workspace:^" "@graphql-hive/plugin-deduplicate-request": "workspace:^" "@graphql-hive/pubsub": "workspace:^" @@ -4073,52 +4076,24 @@ __metadata: languageName: unknown linkType: soft -"@graphql-hive/logger-json@workspace:^, @graphql-hive/logger-json@workspace:packages/logger-json": - version: 0.0.0-use.local - resolution: "@graphql-hive/logger-json@workspace:packages/logger-json" - dependencies: - "@graphql-mesh/cross-helpers": "npm:^0.4.10" - "@graphql-mesh/types": "npm:^0.104.0" - "@graphql-mesh/utils": "npm:^0.104.2" - cross-inspect: "npm:^1.0.1" - graphql: "npm:^16.9.0" - pkgroll: "npm:2.12.2" - tslib: "npm:^2.8.1" - peerDependencies: - graphql: ^15.9.0 || ^16.9.0 - languageName: unknown - linkType: soft - -"@graphql-hive/logger-pino@workspace:packages/logger-pino": +"@graphql-hive/logger@workspace:^, @graphql-hive/logger@workspace:packages/logger": version: 0.0.0-use.local - resolution: 
"@graphql-hive/logger-pino@workspace:packages/logger-pino" + resolution: "@graphql-hive/logger@workspace:packages/logger" dependencies: - "@graphql-mesh/types": "npm:^0.104.0" - "@graphql-mesh/utils": "npm:^0.104.2" + "@types/quick-format-unescaped": "npm:^4.0.3" "@whatwg-node/disposablestack": "npm:^0.0.6" - graphql: "npm:16.11.0" - pino: "npm:^9.7.0" - pkgroll: "npm:2.12.2" - tslib: "npm:^2.8.1" - peerDependencies: - graphql: ^15.9.0 || ^16.9.0 - pino: ^9.7.0 - languageName: unknown - linkType: soft - -"@graphql-hive/logger-winston@workspace:packages/logger-winston": - version: 0.0.0-use.local - resolution: "@graphql-hive/logger-winston@workspace:packages/logger-winston" - dependencies: - "@graphql-mesh/types": "npm:^0.104.0" - "@whatwg-node/disposablestack": "npm:^0.0.6" - graphql: "npm:16.11.0" - pkgroll: "npm:2.12.2" - tslib: "npm:^2.8.1" + fast-safe-stringify: "npm:^2.1.1" + pino: "npm:^9.6.0" + pkgroll: "npm:2.11.2" + quick-format-unescaped: "npm:^4.0.4" winston: "npm:^3.17.0" peerDependencies: - graphql: ^15.9.0 || ^16.9.0 - winston: ^3.17.0 + pino: ^9.6.0 + peerDependenciesMeta: + pino: + optional: true + winston: + optional: true languageName: unknown linkType: soft @@ -4372,6 +4347,7 @@ __metadata: dependencies: "@envelop/core": "npm:^5.2.3" "@envelop/instrumentation": "npm:^1.0.0" + "@graphql-hive/logger": "workspace:^" "@graphql-mesh/cross-helpers": "npm:^0.4.10" "@graphql-mesh/transport-common": "workspace:^" "@graphql-mesh/types": "npm:^0.104.0" @@ -4558,6 +4534,7 @@ __metadata: resolution: "@graphql-mesh/plugin-prometheus@workspace:packages/plugins/prometheus" dependencies: "@graphql-hive/gateway-runtime": "workspace:^" + "@graphql-hive/logger": "workspace:^" "@graphql-mesh/cross-helpers": "npm:^0.4.10" "@graphql-mesh/types": "npm:^0.104.0" "@graphql-mesh/utils": "npm:^0.104.2" @@ -4666,6 +4643,7 @@ __metadata: resolution: "@graphql-mesh/transport-common@workspace:packages/transports/common" dependencies: "@envelop/core": "npm:^5.2.3" + 
"@graphql-hive/logger": "workspace:^" "@graphql-hive/pubsub": "workspace:^" "@graphql-hive/signal": "workspace:^" "@graphql-mesh/cross-helpers": "npm:^0.4.10" @@ -8554,6 +8532,13 @@ __metadata: languageName: node linkType: hard +"@types/quick-format-unescaped@npm:^4.0.3": + version: 4.0.3 + resolution: "@types/quick-format-unescaped@npm:4.0.3" + checksum: 10c0/e95ba1dfa68f9d1dee785905c3c648b9fe1514c5261160e566d9a19731013d138269937632dea79a73859988a90621c9653504170c6a7415cacb426343f2a11a + languageName: node + linkType: hard + "@types/range-parser@npm:*": version: 1.2.7 resolution: "@types/range-parser@npm:1.2.7" @@ -13621,6 +13606,7 @@ __metadata: "@babel/plugin-proposal-explicit-resource-management": "npm:7.27.4" "@babel/plugin-transform-class-properties": "npm:7.27.1" "@babel/plugin-transform-class-static-block": "npm:7.27.1" + "@babel/plugin-transform-private-methods": "npm:^7.27.1" "@babel/preset-env": "npm:7.27.2" "@babel/preset-typescript": "npm:7.27.1" "@changesets/changelog-github": "npm:^0.5.0" @@ -17834,7 +17820,7 @@ __metadata: languageName: node linkType: hard -"pino@npm:^9.0.0, pino@npm:^9.7.0": +"pino@npm:^9.0.0": version: 9.7.0 resolution: "pino@npm:9.7.0" dependencies: @@ -17855,6 +17841,27 @@ __metadata: languageName: node linkType: hard +"pino@npm:^9.6.0": + version: 9.6.0 + resolution: "pino@npm:9.6.0" + dependencies: + atomic-sleep: "npm:^1.0.0" + fast-redact: "npm:^3.1.1" + on-exit-leak-free: "npm:^2.1.0" + pino-abstract-transport: "npm:^2.0.0" + pino-std-serializers: "npm:^7.0.0" + process-warning: "npm:^4.0.0" + quick-format-unescaped: "npm:^4.0.3" + real-require: "npm:^0.2.0" + safe-stable-stringify: "npm:^2.3.1" + sonic-boom: "npm:^4.0.1" + thread-stream: "npm:^3.0.0" + bin: + pino: bin.js + checksum: 10c0/bcd1e9d9b301bea13b95689ca9ad7105ae9451928fb6c0b67b3e58c5fe37cea1d40665f3d6641e3da00be0bbc17b89031e67abbc8ea6aac6164f399309fd78e7 + languageName: node + linkType: hard + "pirates@npm:^4.0.1, pirates@npm:^4.0.4, pirates@npm:^4.0.6": 
version: 4.0.7 resolution: "pirates@npm:4.0.7" @@ -18271,7 +18278,7 @@ __metadata: languageName: node linkType: hard -"quick-format-unescaped@npm:^4.0.3": +"quick-format-unescaped@npm:^4.0.3, quick-format-unescaped@npm:^4.0.4": version: 4.0.4 resolution: "quick-format-unescaped@npm:4.0.4" checksum: 10c0/fe5acc6f775b172ca5b4373df26f7e4fd347975578199e7d74b2ae4077f0af05baa27d231de1e80e8f72d88275ccc6028568a7a8c9ee5e7368ace0e18eff93a4