
Commit 8dd816d

ZhiPuAiChatModel support InternalToolExecutionMaxAttempts
Signed-off-by: lambochen <[email protected]>
1 parent bf9df71 · commit 8dd816d

10 files changed, +129 −118 lines

models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java

Lines changed: 4 additions & 5 deletions

@@ -440,10 +440,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-defaultOptions.getInternalToolExecutionMaxAttempts())
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -454,7 +452,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 else {
 requestOptions.setHttpHeaders(this.defaultOptions.getHttpHeaders());
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());
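
Note: the buildRequestPrompt() change above is the same merge that every model in this commit applies to internalToolExecutionMaxAttempts: a value set on the prompt's runtime options takes precedence, and the model's default is the fallback. A minimal, self-contained sketch of that precedence rule, assuming ModelOptionsUtils.mergeOption(runtime, default) simply returns the first non-null argument (the helper and values below are illustrative, not the library code):

// Illustrative sketch only -- mirrors the null-coalescing behaviour the
// setInternalToolExecutionMaxAttempts(mergeOption(...)) calls above rely on.
class MergePrecedenceSketch {

	static <T> T mergeOption(T runtimeValue, T defaultValue) {
		return runtimeValue != null ? runtimeValue : defaultValue;
	}

	public static void main(String[] args) {
		Integer runtimeMaxAttempts = 3; // e.g. set on a single Prompt's options
		Integer defaultMaxAttempts = 5; // e.g. configured once on the chat model

		// Prints 3: the per-prompt value overrides the model default.
		System.out.println(mergeOption(runtimeMaxAttempts, defaultMaxAttempts));

		// Prints 5: with no per-prompt value, the model default applies.
		System.out.println(mergeOption((Integer) null, defaultMaxAttempts));
	}

}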

models/spring-ai-azure-openai/src/main/java/org/springframework/ai/azure/openai/AzureOpenAiChatModel.java

Lines changed: 7 additions & 9 deletions

@@ -378,7 +378,8 @@ public Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse previousCha
 });

 return chatResponseFlux.flatMap(chatResponse -> {
-if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(prompt.getOptions(), chatResponse, attempts)) {
+if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(prompt.getOptions(), chatResponse,
+attempts)) {
 // FIXME: bounded elastic needs to be used since tool calling
 // is currently only synchronous
 return Flux.defer(() -> {

@@ -394,8 +395,7 @@ public Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse previousCha
 // Send the tool execution result back to the model.
 return this.internalStream(
 new Prompt(toolExecutionResult.conversationHistory(), prompt.getOptions()),
-chatResponse,
-attempts + 1);
+chatResponse, attempts + 1);
 }
 }).subscribeOn(Schedulers.boundedElastic());
 }

@@ -669,11 +669,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 runtimeOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts()
-)
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setStreamUsage(ModelOptionsUtils.mergeOption(runtimeOptions.getStreamUsage(),
 this.defaultOptions.getStreamUsage()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),

@@ -685,7 +682,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setStreamUsage(this.defaultOptions.getStreamUsage());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
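
Note: the attempts parameter threaded through internalStream() above is what makes the new option enforceable. Each round of tool execution recurses with attempts + 1, and the eligibility predicate now also receives that counter, so it can stop requesting further tool rounds once the configured maximum is reached. A rough, self-contained sketch of that control flow, using a loop and hypothetical names (runOneRound, hasToolCalls) in place of the real recursion and Spring AI types:

// Illustrative only: shows how an attempt counter bounds repeated
// tool-calling rounds. The real models recurse through
// internalCall/internalStream rather than looping.
class BoundedToolCallingSketch {

	record ModelResponse(boolean hasToolCalls) {
	}

	// Stand-in for one request/response round trip with the model.
	static ModelResponse runOneRound(int attempt) {
		System.out.println("model call, attempt " + attempt);
		return new ModelResponse(true); // pretend the model always asks for tools
	}

	static ModelResponse callWithTools(int internalToolExecutionMaxAttempts) {
		int attempts = 1;
		ModelResponse response = runOneRound(attempts);
		// Execute tools and re-prompt only while the response asks for tools
		// AND the configured maximum number of attempts is not yet exhausted.
		while (response.hasToolCalls() && attempts < internalToolExecutionMaxAttempts) {
			attempts++;
			response = runOneRound(attempts); // send tool results back to the model
		}
		return response;
	}

	public static void main(String[] args) {
		callWithTools(3); // prints three rounds, then stops despite pending tool calls
	}

}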

models/spring-ai-bedrock-converse/src/main/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModel.java

Lines changed: 6 additions & 9 deletions

@@ -311,11 +311,9 @@ Prompt buildRequestPrompt(Prompt prompt) {
 .internalToolExecutionEnabled(runtimeOptions.getInternalToolExecutionEnabled() != null
 ? runtimeOptions.getInternalToolExecutionEnabled()
 : this.defaultOptions.getInternalToolExecutionEnabled())
-.internalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts())
-)
+.internalToolExecutionMaxAttempts(
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()))
 .build();
 }

@@ -682,8 +680,8 @@ private Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse perviousCh

 Flux<ChatResponse> chatResponseFlux = chatResponses.switchMap(chatResponse -> {

-if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(prompt.getOptions(), chatResponse, attempts)
-&& chatResponse.hasFinishReasons(Set.of(StopReason.TOOL_USE.toString()))) {
+if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(prompt.getOptions(), chatResponse,
+attempts) && chatResponse.hasFinishReasons(Set.of(StopReason.TOOL_USE.toString()))) {

 // FIXME: bounded elastic needs to be used since tool calling
 // is currently only synchronous

@@ -701,8 +699,7 @@ private Flux<ChatResponse> internalStream(Prompt prompt, ChatResponse perviousCh
 // Send the tool execution result back to the model.
 return this.internalStream(
 new Prompt(toolExecutionResult.conversationHistory(), prompt.getOptions()),
-chatResponse,
-attempts + 1);
+chatResponse, attempts + 1);
 }
 }).subscribeOn(Schedulers.boundedElastic());
 }

models/spring-ai-deepseek/src/main/java/org/springframework/ai/deepseek/DeepSeekChatModel.java

Lines changed: 4 additions & 5 deletions

@@ -399,10 +399,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts())
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -412,7 +410,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());

models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java

Lines changed: 48 additions & 44 deletions

@@ -245,34 +245,34 @@ private ChatResponse internalCall(Prompt requestPrompt, int attempts) {
 ChatCompletionRequest request = createRequest(requestPrompt, false);

 ChatModelObservationContext observationContext = ChatModelObservationContext.builder()
-.prompt(requestPrompt)
-.provider(MiniMaxApiConstants.PROVIDER_NAME)
-.build();
+.prompt(requestPrompt)
+.provider(MiniMaxApiConstants.PROVIDER_NAME)
+.build();

 ChatResponse response = ChatModelObservationDocumentation.CHAT_MODEL_OPERATION
-.observation(this.observationConvention, DEFAULT_OBSERVATION_CONVENTION, () -> observationContext,
-this.observationRegistry)
-.observe(() -> {
+.observation(this.observationConvention, DEFAULT_OBSERVATION_CONVENTION, () -> observationContext,
+this.observationRegistry)
+.observe(() -> {

-ResponseEntity<ChatCompletion> completionEntity = this.retryTemplate
-.execute(ctx -> this.miniMaxApi.chatCompletionEntity(request));
+ResponseEntity<ChatCompletion> completionEntity = this.retryTemplate
+.execute(ctx -> this.miniMaxApi.chatCompletionEntity(request));

-var chatCompletion = completionEntity.getBody();
+var chatCompletion = completionEntity.getBody();

-if (chatCompletion == null) {
-logger.warn("No chat completion returned for prompt: {}", requestPrompt);
-return new ChatResponse(List.of());
-}
+if (chatCompletion == null) {
+logger.warn("No chat completion returned for prompt: {}", requestPrompt);
+return new ChatResponse(List.of());
+}

-List<Choice> choices = chatCompletion.choices();
-if (choices == null) {
-logger.warn("No choices returned for prompt: {}, because: {}}", requestPrompt,
-chatCompletion.baseResponse().message());
-return new ChatResponse(List.of());
-}
+List<Choice> choices = chatCompletion.choices();
+if (choices == null) {
+logger.warn("No choices returned for prompt: {}, because: {}}", requestPrompt,
+chatCompletion.baseResponse().message());
+return new ChatResponse(List.of());
+}

-List<Generation> generations = choices.stream().map(choice -> {
-// @formatter:off
+List<Generation> generations = choices.stream().map(choice -> {
+// @formatter:off
 // if the choice is a web search tool call, return last message of choice.messages
 ChatCompletionMessage message = null;
 if (choice.message() != null) {

@@ -288,28 +288,31 @@ else if (!CollectionUtils.isEmpty(choice.messages())) {
 "role", message != null && message.role() != null ? message.role().name() : "",
 "finishReason", choice.finishReason() != null ? choice.finishReason().name() : "");
 // @formatter:on
-return buildGeneration(message, choice.finishReason(), metadata);
-}).toList();
+return buildGeneration(message, choice.finishReason(), metadata);
+}).toList();

-ChatResponse chatResponse = new ChatResponse(generations, from(completionEntity.getBody()));
+ChatResponse chatResponse = new ChatResponse(generations, from(completionEntity.getBody()));

-observationContext.setResponse(chatResponse);
+observationContext.setResponse(chatResponse);

-return chatResponse;
-});
+return chatResponse;
+});

-if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(requestPrompt.getOptions(), response, attempts)) {
+if (this.toolExecutionEligibilityPredicate.isToolExecutionRequired(requestPrompt.getOptions(), response,
+attempts)) {
 var toolExecutionResult = this.toolCallingManager.executeToolCalls(requestPrompt, response);
 if (toolExecutionResult.returnDirect()) {
 // Return tool execution result directly to the client.
 return ChatResponse.builder()
-.from(response)
-.generations(ToolExecutionResult.buildGenerations(toolExecutionResult))
-.build();
+.from(response)
+.generations(ToolExecutionResult.buildGenerations(toolExecutionResult))
+.build();
 }
 else {
 // Send the tool execution result back to the model.
-return this.internalCall(new Prompt(toolExecutionResult.conversationHistory(), requestPrompt.getOptions()), attempts + 1);
+return this.internalCall(
+new Prompt(toolExecutionResult.conversationHistory(), requestPrompt.getOptions()),
+attempts + 1);
 }
 }

@@ -334,16 +337,16 @@ private Flux<ChatResponse> internalStream(Prompt requestPrompt, int attempts) {
 ChatCompletionRequest request = createRequest(requestPrompt, true);

 Flux<ChatCompletionChunk> completionChunks = this.retryTemplate
-.execute(ctx -> this.miniMaxApi.chatCompletionStream(request));
+.execute(ctx -> this.miniMaxApi.chatCompletionStream(request));

 // For chunked responses, only the first chunk contains the choice role.
 // The rest of the chunks with same ID share the same role.
 ConcurrentHashMap<String, String> roleMap = new ConcurrentHashMap<>();

 final ChatModelObservationContext observationContext = ChatModelObservationContext.builder()
-.prompt(requestPrompt)
-.provider(MiniMaxApiConstants.PROVIDER_NAME)
-.build();
+.prompt(requestPrompt)
+.provider(MiniMaxApiConstants.PROVIDER_NAME)
+.build();

 Observation observation = ChatModelObservationDocumentation.CHAT_MODEL_OPERATION.observation(
 this.observationConvention, DEFAULT_OBSERVATION_CONVENTION, () -> observationContext,

@@ -354,12 +357,12 @@ private Flux<ChatResponse> internalStream(Prompt requestPrompt, int attempts) {
 // Convert the ChatCompletionChunk into a ChatCompletion to be able to reuse
 // the function call handling logic.
 Flux<ChatResponse> chatResponse = completionChunks.map(this::chunkToChatCompletion)
-.switchMap(chatCompletion -> Mono.just(chatCompletion).map(chatCompletion2 -> {
-try {
-@SuppressWarnings("null")
-String id = chatCompletion2.id();
+.switchMap(chatCompletion -> Mono.just(chatCompletion).map(chatCompletion2 -> {
+try {
+@SuppressWarnings("null")
+String id = chatCompletion2.id();

-// @formatter:off
+// @formatter:off
 List<Generation> generations = chatCompletion2.choices().stream().map(choice -> {
 if (choice.message().role() != null) {
 roleMap.putIfAbsent(id, choice.message().role().name());

@@ -483,8 +486,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts()));
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -494,7 +497,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());
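
Note: internalCall() above shows the blocking counterpart of the streaming change: the recursion now carries attempts + 1, and the predicate receives the counter via the three-argument isToolExecutionRequired(options, response, attempts). The exact check lives in the predicate implementation, which is not part of this diff; the sketch below is a plausible, self-contained reading of it (plain Java fields instead of Spring AI's ChatOptions/ChatResponse, and the boundary condition is an assumption):

// Hypothetical sketch of an attempts-aware eligibility check -- not the
// library's ToolExecutionEligibilityPredicate implementation.
class EligibilitySketch {

	static boolean isToolExecutionRequired(boolean internalToolExecutionEnabled,
			Integer internalToolExecutionMaxAttempts, boolean responseHasToolCalls, int attempts) {
		if (!internalToolExecutionEnabled || !responseHasToolCalls) {
			return false;
		}
		// Assumption: a null maximum means "no explicit bound"; otherwise stop
		// once the attempt counter reaches the configured limit.
		return internalToolExecutionMaxAttempts == null || attempts < internalToolExecutionMaxAttempts;
	}

	public static void main(String[] args) {
		System.out.println(isToolExecutionRequired(true, 3, true, 1));    // true: under the limit
		System.out.println(isToolExecutionRequired(true, 3, true, 3));    // false: limit reached
		System.out.println(isToolExecutionRequired(true, null, true, 7)); // true: no bound configured
	}

}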

models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java

Lines changed: 4 additions & 7 deletions

@@ -85,7 +85,6 @@
 * @author Alexandros Pappas
 * @author lambochen
 * @since 1.0.0
-*
 * @see ToolCallingChatOptions
 */
 public class MistralAiChatModel implements ChatModel {

@@ -398,11 +397,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts()
-)
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -412,7 +408,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());

models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/OllamaChatModel.java

Lines changed: 4 additions & 6 deletions

@@ -86,7 +86,6 @@
 * @author Ilayaperumal Gopinathan
 * @author lambochen
 * @since 1.0.0
-*
 * @see ToolCallingChatOptions
 */
 public class OllamaChatModel implements ChatModel {

@@ -394,10 +393,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts())
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -407,7 +404,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());

models/spring-ai-vertex-ai-gemini/src/main/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModel.java

Lines changed: 4 additions & 5 deletions

@@ -472,10 +472,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionEnabled(),
 this.defaultOptions.getInternalToolExecutionEnabled()));
 requestOptions.setInternalToolExecutionMaxAttempts(
-ModelOptionsUtils.mergeOption(
-runtimeOptions.getInternalToolExecutionMaxAttempts(),
-this.defaultOptions.getInternalToolExecutionMaxAttempts())
-);
+ModelOptionsUtils.mergeOption(runtimeOptions.getInternalToolExecutionMaxAttempts(),
+this.defaultOptions.getInternalToolExecutionMaxAttempts()));
 requestOptions.setToolNames(ToolCallingChatOptions.mergeToolNames(runtimeOptions.getToolNames(),
 this.defaultOptions.getToolNames()));
 requestOptions.setToolCallbacks(ToolCallingChatOptions.mergeToolCallbacks(runtimeOptions.getToolCallbacks(),

@@ -490,7 +488,8 @@ Prompt buildRequestPrompt(Prompt prompt) {
 }
 else {
 requestOptions.setInternalToolExecutionEnabled(this.defaultOptions.getInternalToolExecutionEnabled());
-requestOptions.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
+requestOptions
+.setInternalToolExecutionMaxAttempts(this.defaultOptions.getInternalToolExecutionMaxAttempts());
 requestOptions.setToolNames(this.defaultOptions.getToolNames());
 requestOptions.setToolCallbacks(this.defaultOptions.getToolCallbacks());
 requestOptions.setToolContext(this.defaultOptions.getToolContext());
