Skip to content

Commit 4f14e68

Browse files
committed
fix: implement default stepCountIs(5) stop condition and fix flaky test
- Added the missing default stop condition in shouldStopExecution(): the documented stepCountIs(5) default was never implemented, allowing infinite tool-execution loops.
- Changed the test model from llama-3.1-8b-instruct to claude-sonnet-4.5 to fix timeout issues in the chat-style tools test.
1 parent 6729753 commit 4f14e68

File tree

2 files changed

+8
-6
lines changed

2 files changed

+8
-6
lines changed

src/lib/model-result.ts

Lines changed: 7 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,7 @@ import {
4747
import { executeTool } from './tool-executor.js';
4848
import { executeNextTurnParamsFunctions, applyNextTurnParamsToRequest } from './next-turn-params.js';
4949
import { hasExecuteFunction } from './tool-types.js';
50-
import { isStopConditionMet } from './stop-conditions.js';
50+
import { isStopConditionMet, stepCountIs } from './stop-conditions.js';
5151

5252
/**
5353
* Type guard for stream event with toReadableStream method
@@ -284,13 +284,15 @@ export class ModelResult<TTools extends readonly Tool[]> {
284284
/**
285285
* Check if stop conditions are met
286286
* Returns true if execution should stop
287+
* Default: stepCountIs(5) if no stopWhen is specified
287288
*/
288289
private async shouldStopExecution(): Promise<boolean> {
289-
if (!this.options.stopWhen) return false;
290+
// Use default of stepCountIs(5) if no stopWhen is specified
291+
const stopWhen = this.options.stopWhen ?? stepCountIs(5);
290292

291-
const stopConditions = Array.isArray(this.options.stopWhen)
292-
? this.options.stopWhen
293-
: [this.options.stopWhen];
293+
const stopConditions = Array.isArray(stopWhen)
294+
? stopWhen
295+
: [stopWhen];
294296

295297
return isStopConditionMet({
296298
stopConditions,

tests/e2e/call-model.test.ts

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -160,7 +160,7 @@ describe('callModel E2E Tests', () => {
160160

161161
it('should work with chat-style messages and chat-style tools together', async () => {
162162
const response = client.callModel({
163-
model: 'meta-llama/llama-3.1-8b-instruct',
163+
model: 'anthropic/claude-sonnet-4.5',
164164
input: fromChatMessages([
165165
{
166166
role: 'system',

0 commit comments

Comments (0)