 import { randomUUID } from "node:crypto";
-import { checkServerIdentity } from "node:tls";
-import {
-  CompressionTypes,
-  Kafka,
-  type Producer,
-  type ProducerConfig,
-} from "kafkajs";
-import { compress, decompress } from "lz4js";
+import type { ProducerConfig } from "kafkajs";
 import {
   type UsageV2Event,
   type UsageV2Source,
   getTopicName,
 } from "../core/usageV2.js";
-
-// CompressionCodecs is not exported properly in kafkajs. Source: https://github.com/tulios/kafkajs/issues/1391
-import KafkaJS from "kafkajs";
-const { CompressionCodecs } = KafkaJS;
+import { KafkaProducer } from "./kafka.js";

 /**
  * Creates a UsageV2Producer which opens a persistent TCP connection.
@@ -31,10 +21,8 @@ const { CompressionCodecs } = KafkaJS;
  * ```
  */
 export class UsageV2Producer {
-  private kafka: Kafka;
-  private producer: Producer | null = null;
+  private kafkaProducer: KafkaProducer;
   private topic: string;
-  private compression: CompressionTypes;

   constructor(config: {
     /**
@@ -57,66 +45,22 @@ export class UsageV2Producer {
     username: string;
     password: string;
   }) {
-    const {
-      producerName,
-      environment,
-      source,
-      shouldCompress = true,
-      username,
-      password,
-    } = config;
-
-    this.kafka = new Kafka({
-      clientId: `${producerName}-${environment}`,
-      brokers:
-        environment === "production"
-          ? ["warpstream.thirdweb.xyz:9092"]
-          : ["warpstream-dev.thirdweb.xyz:9092"],
-      ssl: {
-        checkServerIdentity(hostname, cert) {
-          return checkServerIdentity(hostname.toLowerCase(), cert);
-        },
-      },
-      sasl: {
-        mechanism: "plain",
-        username,
-        password,
-      },
+    this.kafkaProducer = new KafkaProducer({
+      producerName: config.producerName,
+      environment: config.environment,
+      shouldCompress: config.shouldCompress,
+      username: config.username,
+      password: config.password,
     });
-
-    this.topic = getTopicName(source);
-    this.compression = shouldCompress
-      ? CompressionTypes.LZ4
-      : CompressionTypes.None;
+    this.topic = getTopicName(config.source);
   }

   /**
    * Connect the producer.
    * This must be called before calling `sendEvents()`.
    */
   async init(configOverrides?: ProducerConfig) {
-    if (this.compression === CompressionTypes.LZ4) {
-      CompressionCodecs[CompressionTypes.LZ4] = () => ({
-        // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-        compress: (encoder: { buffer: Buffer }) => {
-          const compressed = compress(encoder.buffer);
-          // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-          return Buffer.from(compressed);
-        },
-        // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-        decompress: (buffer: Buffer) => {
-          const decompressed = decompress(buffer);
-          // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-          return Buffer.from(decompressed);
-        },
-      });
-    }
-
-    this.producer = this.kafka.producer({
-      allowAutoTopicCreation: false,
-      ...configOverrides,
-    });
-    await this.producer.connect();
+    return this.kafkaProducer.init(configOverrides);
   }

   /**
@@ -139,42 +83,23 @@ export class UsageV2Producer {
       timeout?: number;
     },
   ): Promise<void> {
-    if (!this.producer) {
-      throw new Error("Producer not initialized. Call `init()` first.");
-    }
-
-    const parsedEvents = events.map((event) => {
-      return {
-        ...event,
-        id: event.id ?? randomUUID(),
-        created_at: event.created_at ?? new Date(),
-        // Remove the "team_" prefix, if any.
-        team_id: event.team_id.startsWith("team_")
-          ? event.team_id.slice(5)
-          : event.team_id,
-      };
-    });
-
-    await this.producer.send({
-      topic: this.topic,
-      messages: parsedEvents.map((event) => ({
-        value: JSON.stringify(event),
-      })),
-      acks: -1, // All brokers must acknowledge
-      timeout: 10_000, // 10 seconds
-      compression: this.compression,
-      ...configOverrides,
-    });
+    const parsedEvents = events.map((event) => ({
+      ...event,
+      id: event.id ?? randomUUID(),
+      created_at: event.created_at ?? new Date(),
+      // Remove the "team_" prefix, if any.
+      team_id: event.team_id.startsWith("team_")
+        ? event.team_id.slice(5)
+        : event.team_id,
+    }));
+    await this.kafkaProducer.send(this.topic, parsedEvents, configOverrides);
   }

   /**
    * Disconnects UsageV2Producer.
    * Useful when shutting down the service to flush in-flight events.
    */
   async disconnect() {
-    if (this.producer) {
-      await this.producer.disconnect();
-      this.producer = null;
-    }
+    await this.kafkaProducer.disconnect();
   }
 }
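The extracted `KafkaProducer` in `./kafka.js` is not part of this diff. For context, here is a minimal sketch of what that wrapper presumably encapsulates, reconstructed from the code removed above (broker selection, SASL/SSL setup, LZ4 codec registration, and the send/disconnect lifecycle). The actual module may differ; the `environment` type and the `send()` message typing are assumptions.

```ts
import { checkServerIdentity } from "node:tls";
import {
  CompressionTypes,
  Kafka,
  type Producer,
  type ProducerConfig,
} from "kafkajs";
import { compress, decompress } from "lz4js";

// CompressionCodecs is not exported properly in kafkajs. Source: https://github.com/tulios/kafkajs/issues/1391
import KafkaJS from "kafkajs";
const { CompressionCodecs } = KafkaJS;

export class KafkaProducer {
  private kafka: Kafka;
  private producer: Producer | null = null;
  private compression: CompressionTypes;

  constructor(config: {
    producerName: string;
    environment: string; // assumed to be "production" | "development"; the exact type is not visible in this diff
    shouldCompress?: boolean;
    username: string;
    password: string;
  }) {
    const { producerName, environment, shouldCompress = true, username, password } = config;

    // Same client setup as the code removed from UsageV2Producer.
    this.kafka = new Kafka({
      clientId: `${producerName}-${environment}`,
      brokers:
        environment === "production"
          ? ["warpstream.thirdweb.xyz:9092"]
          : ["warpstream-dev.thirdweb.xyz:9092"],
      ssl: {
        checkServerIdentity(hostname, cert) {
          return checkServerIdentity(hostname.toLowerCase(), cert);
        },
      },
      sasl: { mechanism: "plain", username, password },
    });
    this.compression = shouldCompress ? CompressionTypes.LZ4 : CompressionTypes.None;
  }

  async init(configOverrides?: ProducerConfig) {
    if (this.compression === CompressionTypes.LZ4) {
      // Register the LZ4 codec, exactly as the removed UsageV2Producer code did.
      CompressionCodecs[CompressionTypes.LZ4] = () => ({
        compress: (encoder: { buffer: Buffer }) => Buffer.from(compress(encoder.buffer)),
        decompress: (buffer: Buffer) => Buffer.from(decompress(buffer)),
      });
    }
    this.producer = this.kafka.producer({
      allowAutoTopicCreation: false,
      ...configOverrides,
    });
    await this.producer.connect();
  }

  async send(
    topic: string,
    messages: unknown[],
    configOverrides?: { acks?: number; timeout?: number },
  ): Promise<void> {
    if (!this.producer) {
      throw new Error("Producer not initialized. Call `init()` first.");
    }
    await this.producer.send({
      topic,
      messages: messages.map((message) => ({ value: JSON.stringify(message) })),
      acks: -1, // All brokers must acknowledge.
      timeout: 10_000, // 10 seconds.
      compression: this.compression,
      ...configOverrides,
    });
  }

  async disconnect() {
    if (this.producer) {
      await this.producer.disconnect();
      this.producer = null;
    }
  }
}
```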
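A hypothetical usage sketch of the refactored `UsageV2Producer`, showing the init/sendEvents/disconnect lifecycle described in the JSDoc above. The module path, service name, environment value, and env var names are placeholders; events are passed in rather than constructed, since the full `UsageV2Event` shape is not shown in this diff.

```ts
import type { UsageV2Event, UsageV2Source } from "../core/usageV2.js";
import { UsageV2Producer } from "./usageV2Producer.js"; // hypothetical module path

export async function emitUsage(
  source: UsageV2Source,
  events: UsageV2Event[],
): Promise<void> {
  const producer = new UsageV2Producer({
    producerName: "my-service", // placeholder values
    environment: "development",
    source,
    username: process.env.KAFKA_USERNAME ?? "",
    password: process.env.KAFKA_PASSWORD ?? "",
  });

  // Opens the persistent connection; must be called before sendEvents().
  await producer.init();
  try {
    await producer.sendEvents(events);
  } finally {
    // Flushes in-flight events before shutting down.
    await producer.disconnect();
  }
}
```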