Skip to content

Commit af97d87

Browse files
✨ feat: Enhance AI commit generation with token tracking and streaming support
1 parent ced3122 commit af97d87

File tree

4 files changed

+41
-6
lines changed

4 files changed

+41
-6
lines changed

packages/lobe-commit/src/commands/Ai/index.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ const Ai = memo(() => {
1616
start,
1717
loadingInfo,
1818
loading,
19+
tokenUsage,
1920
message: streamingMessage,
2021
} = useCommits({ setMessage });
2122

@@ -34,7 +35,7 @@ const Ai = memo(() => {
3435
)
3536
}
3637
reverse
37-
title={`🤯 AI Commit Generator ${commitConfig.stream ? '(Streaming)' : ''}`}
38+
title={`🤯 AI Commit Generator ${commitConfig.stream ? '(Streaming)' : ''} ${tokenUsage > 0 ? `[Tokens: ${tokenUsage}]` : ''}`}
3839
>
3940
<AiMessageDisplay
4041
loading={loading}

packages/lobe-commit/src/commands/Commit/AiCommit.tsx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -23,6 +23,7 @@ const AiCommit = memo(() => {
2323
loadingInfo,
2424
loading,
2525
restart,
26+
tokenUsage,
2627
message: streamingMessage,
2728
} = useCommits({ setMessage });
2829

@@ -71,7 +72,7 @@ const AiCommit = memo(() => {
7172
return (
7273
<Panel
7374
footer={!loading && message && <SelectInput items={items} onSelect={handleSelect} />}
74-
title={`🤯 AI Commit Generator ${commitConfig.stream ? '(Streaming)' : ''}`}
75+
title={`🤯 AI Commit Generator ${commitConfig.stream ? '(Streaming)' : ''} ${tokenUsage > 0 ? `[Tokens: ${tokenUsage}]` : ''}`}
7576
>
7677
{summary && (
7778
<SplitView direction={'bottom'}>

packages/lobe-commit/src/core/Commits.ts

Lines changed: 28 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -20,6 +20,7 @@ import { RecursiveCharacterTextSplitter } from '../../../common/textSplitter';
2020
export interface GenAiCommitProps {
2121
cacheSummary?: string;
2222
onStreamMessage?: (message: string) => void;
23+
onTokenUsage?: (tokenUsage: number) => void;
2324
setLoadingInfo: (text: string) => void;
2425
setSummary: (text: string) => void;
2526
}
@@ -52,7 +53,13 @@ export class Commits {
5253
this.prompt = promptCommits();
5354
}
5455

55-
async genCommit({ setLoadingInfo, setSummary, cacheSummary, onStreamMessage }: GenAiCommitProps) {
56+
async genCommit({
57+
setLoadingInfo,
58+
setSummary,
59+
cacheSummary,
60+
onStreamMessage,
61+
onTokenUsage,
62+
}: GenAiCommitProps) {
5663
setLoadingInfo(' Generating...');
5764

5865
// STEP 1
@@ -63,16 +70,21 @@ export class Commits {
6370
summary,
6471
});
6572

73+
// Calculate input tokens
74+
const inputTokens = calcToken(JSON.stringify(messages));
75+
6676
if (this.config.stream && onStreamMessage) {
67-
return this.genCommitStream(messages, onStreamMessage);
77+
return this.genCommitStream(messages, onStreamMessage, onTokenUsage, inputTokens);
6878
} else {
69-
return this.genCommitNonStream(messages);
79+
return this.genCommitNonStream(messages, onTokenUsage, inputTokens);
7080
}
7181
}
7282

7383
private async genCommitStream(
7484
messages: any[],
7585
onStreamMessage: (message: string) => void,
86+
onTokenUsage?: (tokenUsage: number) => void,
87+
inputTokens?: number,
7688
): Promise<string> {
7789
// 开始流式输出,先调用一次回调来切换UI状态
7890
onStreamMessage('');
@@ -85,6 +97,7 @@ export class Commits {
8597
});
8698

8799
let fullMessage = '';
100+
let tokenUsage = inputTokens || 0;
88101

89102
for await (const chunk of stream) {
90103
const content = chunk.choices[0]?.delta?.content || '';
@@ -95,19 +108,26 @@ export class Commits {
95108
fullMessage.replace(/\((.*?)\):/, (match, p1) => match && `(${p1.toLowerCase()}):`),
96109
);
97110
onStreamMessage(processedMessage);
111+
tokenUsage += calcToken(content);
98112
}
99113
}
100114

101115
if (!fullMessage) {
102116
alert.error('Diff summary failed, please check your network or try again...', true);
103117
}
104118

119+
onTokenUsage?.(tokenUsage);
120+
105121
return addEmojiToMessage(
106122
fullMessage.replace(/\((.*?)\):/, (match, p1) => match && `(${p1.toLowerCase()}):`),
107123
);
108124
}
109125

110-
private async genCommitNonStream(messages: any[]): Promise<string> {
126+
private async genCommitNonStream(
127+
messages: any[],
128+
onTokenUsage?: (tokenUsage: number) => void,
129+
inputTokens?: number,
130+
): Promise<string> {
111131
const completion = await this.client.chat.completions.create({
112132
messages: messages as OpenAI.Chat.Completions.ChatCompletionMessageParam[],
113133
model: this.config.modelName,
@@ -119,6 +139,10 @@ export class Commits {
119139
if (!result)
120140
alert.error('Diff summary failed, please check your network or try again...', true);
121141

142+
const outputTokens = calcToken(result!);
143+
const totalTokenUsage = (inputTokens || 0) + outputTokens;
144+
onTokenUsage?.(totalTokenUsage);
145+
122146
return addEmojiToMessage(
123147
result!.replace(/\((.*?)\):/, (match, p1) => match && `(${p1.toLowerCase()}):`),
124148
);

packages/lobe-commit/src/hooks/useCommits.ts

Lines changed: 9 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -16,6 +16,7 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
1616
const [isGlobalLoading, setIsGlobalLoading] = useState(true);
1717
const [key, setKey] = useState<string>(Date.now().toString());
1818
const [streamingMessage, setStreamingMessage] = useState<string>('');
19+
const [tokenUsage, setTokenUsage] = useState<number>(0);
1920
const commitConfig = selectors.getCommitConfig();
2021

2122
const handleStreamMessage = useCallback(
@@ -27,6 +28,10 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
2728
[setMessage],
2829
);
2930

31+
const handleTokenUsage = useCallback((usage: number) => {
32+
setTokenUsage(usage);
33+
}, []);
34+
3035
const { data, isLoading } = useSWR(
3136
shouldFetch ? key : null,
3237
async () => {
@@ -36,6 +41,7 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
3641
return commits.current.genCommit({
3742
cacheSummary: summary,
3843
onStreamMessage: commitConfig.stream ? handleStreamMessage : undefined,
44+
onTokenUsage: handleTokenUsage,
3945
setLoadingInfo,
4046
setSummary,
4147
});
@@ -60,6 +66,7 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
6066
setKey(Date.now().toString());
6167
setIsGlobalLoading(true);
6268
setStreamingMessage('');
69+
setTokenUsage(0);
6370
setShouldFetch(true);
6471
}, []);
6572

@@ -68,6 +75,7 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
6875
setKey(Date.now().toString());
6976
setIsGlobalLoading(true);
7077
setStreamingMessage('');
78+
setTokenUsage(0);
7179
setShouldFetch(true);
7280
}, []);
7381

@@ -84,5 +92,6 @@ export const useCommits = ({ setMessage, onSuccess, onError, ...config }: Commit
8492
start,
8593
stop,
8694
summary,
95+
tokenUsage,
8796
};
8897
};

0 commit comments

Comments (0)