
Commit 0595c37

[service-utils] switch to @confluentinc/kafka-javascript (#6345)
1 parent 2a4b5dc commit 0595c37

File tree: 6 files changed (+281, -238 lines)

.changeset/pink-states-tell.md

Lines changed: 5 additions & 0 deletions

@@ -0,0 +1,5 @@
+---
+"@thirdweb-dev/service-utils": patch
+---
+
+[service-utils] Use @confluentinc/kafka-javascript

packages/service-utils/package.json

Lines changed: 1 addition & 3 deletions

@@ -45,14 +45,12 @@
   ],
   "sideEffects": false,
   "dependencies": {
+    "@confluentinc/kafka-javascript": "^1.2.0",
     "aws4fetch": "1.0.20",
-    "kafkajs": "2.2.4",
-    "lz4js": "0.2.0",
     "zod": "3.24.2"
   },
   "devDependencies": {
     "@cloudflare/workers-types": "4.20250224.0",
-    "@types/lz4js": "0.2.1",
     "@types/node": "22.13.5",
     "typescript": "5.7.3",
     "vitest": "3.0.7"

packages/service-utils/src/core/usageV2.ts

Lines changed: 12 additions & 1 deletion

@@ -10,7 +10,14 @@ export const USAGE_V2_SOURCES = [
 ] as const;
 export type UsageV2Source = (typeof USAGE_V2_SOURCES)[number];
 export function getTopicName(source: UsageV2Source) {
-  return `usage_v2.raw_${source}`;
+  switch (source) {
+    // Some sources are sent from clients and are written to an "untrusted" table.
+    case "sdk":
+    case "engine":
+      return `usage_v2.untrusted_raw_${source}`;
+    default:
+      return `usage_v2.raw_${source}`;
+  }
 }
 
 export interface ClientUsageV2Event {
@@ -55,6 +62,10 @@ export interface ClientUsageV2Event {
    * The product version, if available.
    */
   product_version?: string;
+  /**
+   * The event version. Defaults to 1.
+   */
+  version?: number;
   /**
    * An object of arbitrary key-value pairs.
    * Values can be boolean, number, string, Date, or null.
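
For illustration, a minimal standalone sketch of the topic routing introduced above. The `"sdk"` and `"engine"` sources come from this diff; `"webhook"` is a hypothetical source used only to show the default branch:

```ts
// Standalone sketch of the routing in getTopicName above.
// "sdk" and "engine" come from this diff; "webhook" is hypothetical.
type UsageV2Source = "sdk" | "engine" | "webhook";

function getTopicName(source: UsageV2Source): string {
  switch (source) {
    // Client-sent sources are written to an "untrusted" raw table.
    case "sdk":
    case "engine":
      return `usage_v2.untrusted_raw_${source}`;
    default:
      return `usage_v2.raw_${source}`;
  }
}

console.log(getTopicName("sdk")); // usage_v2.untrusted_raw_sdk
console.log(getTopicName("webhook")); // usage_v2.raw_webhook
```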
packages/service-utils/src/node/kafka.ts

Lines changed: 45 additions & 76 deletions

@@ -1,10 +1,7 @@
-import { checkServerIdentity } from "node:tls";
-import { CompressionTypes, Kafka, type Producer } from "kafkajs";
-import { compress, decompress } from "lz4js";
-
-// CompressionCodecs is not exported properly in kafkajs. Source: https://github.com/tulios/kafkajs/issues/1391
-import KafkaJS from "kafkajs";
-const { CompressionCodecs } = KafkaJS;
+import {
+  KafkaJS,
+  type ProducerGlobalConfig,
+} from "@confluentinc/kafka-javascript";
 
 /**
  * Reference: https://kafka.js.org/docs/producing#producing-messages
@@ -33,104 +30,73 @@ export interface KafkaProducerSendOptions {
  * ```
  */
 export class KafkaProducer {
-  private kafka: Kafka;
-  private producer: Producer | null = null;
-  private compression: CompressionTypes;
+  private producer: KafkaJS.Producer;
+  private isConnected = false;
 
-  constructor(config: {
+  constructor(options: {
     /**
      * A descriptive name for your service. Example: "storage-server"
      */
     producerName: string;
     /**
-     * The environment the service is running in.
-     */
-    environment: "development" | "production";
-    /**
-     * Whether to compress the events.
+     * A comma-separated list of `host[:port]` Kafka servers.
      */
-    shouldCompress?: boolean;
-
+    kafkaServers: string;
     username: string;
     password: string;
+
+    /**
+     * Configuration for the Kafka producer.
+     */
+    config?: ProducerGlobalConfig;
   }) {
-    const {
-      producerName,
-      environment,
-      shouldCompress = true,
-      username,
-      password,
-    } = config;
+    const { producerName, kafkaServers, username, password, config } = options;
 
-    this.kafka = new Kafka({
-      clientId: `${producerName}-${environment}`,
-      brokers:
-        environment === "production"
-          ? ["warpstream.thirdweb.xyz:9092"]
-          : ["warpstream-dev.thirdweb.xyz:9092"],
-      ssl: {
-        checkServerIdentity(hostname, cert) {
-          return checkServerIdentity(hostname.toLowerCase(), cert);
-        },
-      },
-      sasl: {
-        mechanism: "plain",
-        username,
-        password,
-      },
+    this.producer = new KafkaJS.Kafka({}).producer({
+      "client.id": producerName,
+      "bootstrap.servers": kafkaServers,
+      "security.protocol": "sasl_ssl",
+      "sasl.mechanisms": "PLAIN",
+      "sasl.username": username,
+      "sasl.password": password,
+      "compression.codec": "lz4",
+      "allow.auto.create.topics": true,
+      // All configuration can be overridden.
+      ...config,
    });
+  }
 
-    if (shouldCompress) {
-      this.compression = CompressionTypes.LZ4;
-
-      CompressionCodecs[CompressionTypes.LZ4] = () => ({
-        // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-        compress: (encoder: { buffer: Buffer }) => {
-          const compressed = compress(encoder.buffer);
-          // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-          return Buffer.from(compressed);
-        },
-        // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-        decompress: (buffer: Buffer) => {
-          const decompressed = decompress(buffer);
-          // biome-ignore lint/style/noRestrictedGlobals: kafkajs expects a Buffer
-          return Buffer.from(decompressed);
-        },
-      });
-    } else {
-      this.compression = CompressionTypes.None;
-    }
+  /**
+   * Connects the producer. Can be called explicitly at the start of your service, or will be called automatically when sending messages.
+   */
+  async connect() {
+    await this.producer.connect();
+    this.isConnected = true;
   }
 
   /**
    * Send messages to a Kafka topic.
    * This method may throw. To call this non-blocking:
+   * ```ts
+   * void kafka.send(topic, events).catch((e) => console.error(e))
+   * ```
+   *
    * @param topic
    * @param messages
-   * @param configOverrides
    */
   async send(
     topic: string,
     messages: Record<string, unknown>[],
-    options?: KafkaProducerSendOptions,
   ): Promise<void> {
-    if (!this.producer) {
-      this.producer = this.kafka.producer({
-        allowAutoTopicCreation: options?.allowAutoTopicCreation ?? false,
-        maxInFlightRequests: options?.maxInFlightRequests ?? 2000,
-        retry: { retries: options?.retries ?? 5 },
-      });
-      await this.producer.connect();
+    if (!this.isConnected) {
+      await this.connect();
    }
 
    await this.producer.send({
      topic,
      messages: messages.map((m) => ({
        value: JSON.stringify(m),
      })),
-      compression: this.compression,
-      acks: options?.acks ?? -1, // Default: All brokers must acknowledge
-      timeout: options?.timeout ?? 10_000, // Default: 10 seconds
    });
  }
 
@@ -139,9 +105,12 @@ export class KafkaProducer {
    * Useful when shutting down the service to flush in-flight events.
    */
   async disconnect() {
-    if (this.producer) {
-      await this.producer.disconnect();
-      this.producer = null;
+    if (this.isConnected) {
+      try {
+        await this.producer.flush();
+        await this.producer.disconnect();
+      } catch {}
+      this.isConnected = false;
    }
  }
 }
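
A hedged usage sketch of the new `KafkaProducer` surface (constructor options plus `connect`/`send`/`disconnect`). The import path, broker list, and topic name below are placeholders and assumptions, not values taken from this commit:

```ts
// Usage sketch for the KafkaProducer surface shown above.
// Assumptions: the import path and the broker/topic values are placeholders.
import { KafkaProducer } from "@thirdweb-dev/service-utils/node";

async function main() {
  const kafka = new KafkaProducer({
    producerName: "storage-server",
    // Comma-separated `host[:port]` list (placeholder).
    kafkaServers: "broker-1.example.com:9092,broker-2.example.com:9092",
    username: process.env.KAFKA_USERNAME ?? "",
    password: process.env.KAFKA_PASSWORD ?? "",
    // Optional: any librdkafka producer setting overrides the defaults above.
    config: { "compression.codec": "gzip" },
  });

  // Optional eager connect; send() will connect lazily otherwise.
  await kafka.connect();

  // Non-blocking send, per the JSDoc: log errors instead of throwing.
  void kafka
    .send("usage_v2.raw_example", [{ hello: "world" }])
    .catch((e) => console.error(e));

  // Flushes in-flight messages, then disconnects.
  await kafka.disconnect();
}

main().catch(console.error);
```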

packages/service-utils/src/node/usageV2.ts

Lines changed: 9 additions & 14 deletions

@@ -4,7 +4,7 @@ import {
   type UsageV2Source,
   getTopicName,
 } from "../core/usageV2.js";
-import { KafkaProducer, type KafkaProducerSendOptions } from "./kafka.js";
+import { KafkaProducer } from "./kafka.js";
 
 /**
  * Creates a UsageV2Producer which opens a persistent TCP connection.
@@ -35,18 +35,13 @@ export class UsageV2Producer {
      * The product where usage is coming from.
      */
     source: UsageV2Source;
-    /**
-     * Whether to compress the events.
-     */
-    shouldCompress?: boolean;
 
     username: string;
     password: string;
   }) {
     this.kafkaProducer = new KafkaProducer({
       producerName: config.producerName,
       environment: config.environment,
-      shouldCompress: config.shouldCompress,
       username: config.username,
       password: config.password,
    });
@@ -56,25 +51,25 @@ export class UsageV2Producer {
   /**
    * Send usageV2 events.
    * This method may throw. To call this non-blocking:
+   * ```ts
+   * void usageV2.sendEvents(events).catch((e) => console.error(e))
+   * ```
+   *
    * @param events
    */
-  async sendEvents(
-    events: UsageV2Event[],
-    /**
-     * Reference: https://kafka.js.org/docs/producing#producing-messages
-     */
-    options?: KafkaProducerSendOptions,
-  ): Promise<void> {
+  async sendEvents(events: UsageV2Event[]): Promise<void> {
     const parsedEvents = events.map((event) => ({
       ...event,
+      // Default to a generated UUID.
       id: event.id ?? randomUUID(),
+      // Default to now.
       created_at: event.created_at ?? new Date(),
       // Remove the "team_" prefix, if any.
       team_id: event.team_id.startsWith("team_")
         ? event.team_id.slice(5)
         : event.team_id,
    }));
-    await this.kafkaProducer.send(this.topic, parsedEvents, options);
+    await this.kafkaProducer.send(this.topic, parsedEvents);
  }
 
  /**
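
And a sketch of the fire-and-forget pattern the updated JSDoc recommends for `sendEvents`, assuming an already-constructed `UsageV2Producer`; the import path and any event fields beyond `team_id` are assumptions:

```ts
// Fire-and-forget usage of sendEvents, per the JSDoc above.
// Assumptions: the import path and the shape of UsageV2Event beyond `team_id`.
import {
  UsageV2Producer,
  type UsageV2Event,
} from "@thirdweb-dev/service-utils/node";

declare const usageV2: UsageV2Producer; // an already-constructed producer

const events: UsageV2Event[] = [
  // id and created_at are optional; they default to randomUUID() and now,
  // and a leading "team_" prefix is stripped from team_id.
  { team_id: "team_123" } as UsageV2Event,
];

// Non-blocking send: errors are logged instead of thrown.
void usageV2.sendEvents(events).catch((e) => console.error(e));
```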
