Skip to content

Commit 16327ae

Browse files
fix(utils/openai,aicommits): pass abortSignal for timeout, fix diff truncation and token usage keys and openrouter headers
Merge commit 16327ae (2 parents: dfe009f and 1f2443b)

File tree

5 files changed

+66
-24
lines changed

5 files changed

+66
-24
lines changed

src/commands/aicommits.ts

Lines changed: 36 additions & 19 deletions
Original file line number | Diff line number | Diff line change
@@ -126,6 +126,7 @@ export default async (
126126
try {
127127
const baseUrl = providerInstance.getBaseUrl();
128128
const apiKey = providerInstance.getApiKey() || '';
129+
const providerHeaders = providerInstance.getHeaders();
129130

130131
if (isChunking) {
131132
// Split files into chunks
@@ -136,9 +137,9 @@ export default async (
136137

137138
const chunkMessages: string[] = [];
138139
let totalUsage = {
139-
promptTokens: 0,
140-
completionTokens: 0,
141-
totalTokens: 0,
140+
prompt_tokens: 0,
141+
completion_tokens: 0,
142+
total_tokens: 0,
142143
};
143144

144145
for (const chunk of chunks) {
@@ -149,7 +150,7 @@ export default async (
149150
let diffToUse = chunkDiff.diff;
150151
if (diffToUse.length > maxDiffLength) {
151152
diffToUse =
152-
diffToUse.substring(diffToUse.length - maxDiffLength) +
153+
diffToUse.substring(0, maxDiffLength) +
153154
'\n\n[Diff truncated due to size]';
154155
}
155156
const result = await generateCommitMessage(
@@ -162,15 +163,23 @@ export default async (
162163
config['max-length'],
163164
config.type,
164165
timeout,
165-
customPrompt
166+
customPrompt,
167+
providerHeaders
166168
);
167169
chunkMessages.push(...result.messages);
168170
if (result.usage) {
169-
totalUsage.promptTokens +=
170-
(result.usage as any).promptTokens || 0;
171-
totalUsage.completionTokens +=
172-
(result.usage as any).completionTokens || 0;
173-
totalUsage.totalTokens += (result.usage as any).totalTokens || 0;
171+
totalUsage.prompt_tokens +=
172+
(result.usage as any).prompt_tokens ||
173+
(result.usage as any).promptTokens ||
174+
0;
175+
totalUsage.completion_tokens +=
176+
(result.usage as any).completion_tokens ||
177+
(result.usage as any).completionTokens ||
178+
0;
179+
totalUsage.total_tokens +=
180+
(result.usage as any).total_tokens ||
181+
(result.usage as any).totalTokens ||
182+
0;
174183
}
175184
}
176185
}
@@ -185,16 +194,23 @@ export default async (
185194
config['max-length'],
186195
config.type,
187196
timeout,
188-
customPrompt
197+
customPrompt,
198+
providerHeaders
189199
);
190200
messages = combineResult.messages;
191201
if (combineResult.usage) {
192-
totalUsage.promptTokens +=
193-
(combineResult.usage as any).promptTokens || 0;
194-
totalUsage.completionTokens +=
195-
(combineResult.usage as any).completionTokens || 0;
196-
totalUsage.totalTokens +=
197-
(combineResult.usage as any).totalTokens || 0;
202+
totalUsage.prompt_tokens +=
203+
(combineResult.usage as any).prompt_tokens ||
204+
(combineResult.usage as any).promptTokens ||
205+
0;
206+
totalUsage.completion_tokens +=
207+
(combineResult.usage as any).completion_tokens ||
208+
(combineResult.usage as any).completionTokens ||
209+
0;
210+
totalUsage.total_tokens +=
211+
(combineResult.usage as any).total_tokens ||
212+
(combineResult.usage as any).totalTokens ||
213+
0;
198214
}
199215
usage = totalUsage;
200216
} else {
@@ -203,7 +219,7 @@ export default async (
203219
let diffToUse = staged.diff;
204220
if (diffToUse.length > maxDiffLength) {
205221
diffToUse =
206-
diffToUse.substring(diffToUse.length - maxDiffLength) +
222+
diffToUse.substring(0, maxDiffLength) +
207223
'\n\n[Diff truncated due to size]';
208224
}
209225
const result = await generateCommitMessage(
@@ -216,7 +232,8 @@ export default async (
216232
config['max-length'],
217233
config.type,
218234
timeout,
219-
customPrompt
235+
customPrompt,
236+
providerHeaders
220237
);
221238
messages = result.messages;
222239
usage = result.usage;

src/commands/prepare-commit-msg-hook.ts

Lines changed: 4 additions & 1 deletion
Original file line number | Diff line number | Diff line change
@@ -51,6 +51,7 @@ export default () =>
5151

5252
const baseUrl = providerInstance.getBaseUrl();
5353
const apiKey = providerInstance.getApiKey() || '';
54+
const providerHeaders = providerInstance.getHeaders();
5455

5556
// Use config timeout, or default per provider
5657
const timeout =
@@ -72,7 +73,9 @@ export default () =>
7273
config.generate,
7374
config['max-length'],
7475
config.type,
75-
timeout
76+
timeout,
77+
undefined,
78+
providerHeaders
7679
);
7780
messages = result.messages;
7881
} finally {

src/feature/providers/base.ts

Lines changed: 5 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -9,6 +9,7 @@ export type ProviderDef = {
99
modelsFilter?: (models: any[]) => string[];
1010
defaultModels: string[];
1111
requiresApiKey: boolean;
12+
headers?: Record<string, string>;
1213
};
1314

1415
export class Provider {
@@ -111,6 +112,10 @@ export class Provider {
111112
return this.def.defaultModels;
112113
}
113114

115+
getHeaders(): Record<string, string> | undefined {
116+
return this.def.headers;
117+
}
118+
114119
validateConfig(): { valid: boolean; errors: string[] } {
115120
const errors: string[] = [];
116121
if (this.def.requiresApiKey && !this.getApiKey()) {

src/feature/providers/openrouter.ts

Lines changed: 4 additions & 0 deletions
Original file line number | Diff line number | Diff line change
@@ -11,4 +11,8 @@ export const OpenRouterProvider: ProviderDef = {
1111
.map((m: any) => m.id),
1212
defaultModels: ['openai/gpt-oss-20b:free', 'z-ai/glm-4.5-air:free'],
1313
requiresApiKey: true,
14+
headers: {
15+
'HTTP-Referer': 'https://github.com/nutlope/aicommits',
16+
'X-Title': 'aicommits',
17+
},
1418
};

src/utils/openai.ts

Lines changed: 17 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -59,6 +59,7 @@ const shortenCommitMessage = async (
5959
temperature: 0.2,
6060
maxRetries: 2,
6161
maxOutputTokens: 500,
62+
abortSignal: abortController.signal,
6263
});
6364
clearTimeout(timeoutId);
6465
return sanitizeMessage(result.text);
@@ -78,7 +79,8 @@ export const generateCommitMessage = async (
7879
maxLength: number,
7980
type: CommitType,
8081
timeout: number,
81-
customPrompt?: string
82+
customPrompt?: string,
83+
headers?: Record<string, string>
8284
) => {
8385
if (process.env.DEBUG) {
8486
console.log('Diff being sent to AI:');
@@ -93,6 +95,7 @@ export const generateCommitMessage = async (
9395
name: 'custom',
9496
apiKey,
9597
baseURL: baseUrl,
98+
headers,
9699
});
97100

98101
const abortController = new AbortController();
@@ -106,9 +109,16 @@ export const generateCommitMessage = async (
106109
temperature: 0.4,
107110
maxRetries: 2,
108111
maxOutputTokens: 2000,
109-
}).finally(() => clearTimeout(timeoutId))
112+
abortSignal: abortController.signal,
113+
})
110114
);
111-
const results = await Promise.all(promises);
115+
const results = await (async () => {
116+
try {
117+
return await Promise.all(promises);
118+
} finally {
119+
clearTimeout(timeoutId);
120+
}
121+
})();
112122
let texts = results.map((r) => r.text);
113123
let messages = deduplicateMessages(
114124
texts.map((text: string) => sanitizeMessage(text))
@@ -200,7 +210,8 @@ export const combineCommitMessages = async (
200210
maxLength: number,
201211
type: CommitType,
202212
timeout: number,
203-
customPrompt?: string
213+
customPrompt?: string,
214+
headers?: Record<string, string>
204215
) => {
205216
try {
206217
const provider =
@@ -210,6 +221,7 @@ export const combineCommitMessages = async (
210221
name: 'custom',
211222
apiKey,
212223
baseURL: baseUrl,
224+
headers,
213225
});
214226

215227
const abortController = new AbortController();
@@ -229,6 +241,7 @@ Do not add thanks, explanations, or any text outside the commit message.`;
229241
temperature: 0.4,
230242
maxRetries: 2,
231243
maxOutputTokens: 2000,
244+
abortSignal: abortController.signal,
232245
});
233246

234247
clearTimeout(timeoutId);

0 commit comments

Comments (0)