Skip to content

Commit 11bb360

Browse files
committed
refactor ChatOptions Builder
- Deprecate the existing ChatOptionsBuilder and its inner class DefaultChatOptions
- Create a new builder interface, ChatOptions.Builder, for building chat options
- Create an explicit DefaultChatOptions implementation
- Create DefaultChatOptionsBuilder, which creates DefaultChatOptions instances

Resolves #1875
1 parent a474b12 commit 11bb360

File tree

32 files changed

+405
-151
lines changed

32 files changed

+405
-151
lines changed

models/spring-ai-anthropic/src/main/java/org/springframework/ai/anthropic/AnthropicChatModel.java

Lines changed: 7 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,6 @@
5757
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
5858
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
5959
import org.springframework.ai.chat.prompt.ChatOptions;
60-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
6160
import org.springframework.ai.chat.prompt.Prompt;
6261
import org.springframework.ai.model.Media;
6362
import org.springframework.ai.model.ModelOptionsUtils;
@@ -471,13 +470,13 @@ private List<AnthropicApi.Tool> getFunctionTools(Set<String> functionNames) {
471470
}
472471

473472
private ChatOptions buildRequestOptions(AnthropicApi.ChatCompletionRequest request) {
474-
return ChatOptionsBuilder.builder()
475-
.withModel(request.model())
476-
.withMaxTokens(request.maxTokens())
477-
.withStopSequences(request.stopSequences())
478-
.withTemperature(request.temperature())
479-
.withTopK(request.topK())
480-
.withTopP(request.topP())
473+
return ChatOptions.builder()
474+
.model(request.model())
475+
.maxTokens(request.maxTokens())
476+
.stopSequences(request.stopSequences())
477+
.temperature(request.temperature())
478+
.topK(request.topK())
479+
.topP(request.topP())
481480
.build();
482481
}
483482

models/spring-ai-bedrock-converse/src/main/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModel.java

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -93,7 +93,6 @@
9393
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
9494
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
9595
import org.springframework.ai.chat.prompt.ChatOptions;
96-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
9796
import org.springframework.ai.chat.prompt.Prompt;
9897
import org.springframework.ai.model.Media;
9998
import org.springframework.ai.model.ModelOptionsUtils;
@@ -219,13 +218,13 @@ private ChatResponse internalCall(Prompt prompt, ChatResponse perviousChatRespon
219218
}
220219

221220
private ChatOptions buildRequestOptions(ConverseRequest request) {
222-
return ChatOptionsBuilder.builder()
223-
.withModel(request.modelId())
224-
.withMaxTokens(request.inferenceConfig().maxTokens())
225-
.withStopSequences(request.inferenceConfig().stopSequences())
226-
.withTemperature(request.inferenceConfig().temperature() != null
221+
return ChatOptions.builder()
222+
.model(request.modelId())
223+
.maxTokens(request.inferenceConfig().maxTokens())
224+
.stopSequences(request.inferenceConfig().stopSequences())
225+
.temperature(request.inferenceConfig().temperature() != null
227226
? request.inferenceConfig().temperature().doubleValue() : null)
228-
.withTopP(request.inferenceConfig().topP() != null ? request.inferenceConfig().topP().doubleValue() : null)
227+
.topP(request.inferenceConfig().topP() != null ? request.inferenceConfig().topP().doubleValue() : null)
229228
.build();
230229
}
231230

models/spring-ai-bedrock-converse/src/test/java/org/springframework/ai/bedrock/converse/experiments/BedrockConverseChatModelMain.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -20,7 +20,7 @@
2020
import software.amazon.awssdk.regions.Region;
2121

2222
import org.springframework.ai.bedrock.converse.BedrockProxyChatModel;
23-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
23+
import org.springframework.ai.chat.prompt.ChatOptions;
2424
import org.springframework.ai.chat.prompt.Prompt;
2525

2626
/**
@@ -40,7 +40,7 @@ public static void main(String[] args) {
4040

4141
// String modelId = "anthropic.claude-3-5-sonnet-20240620-v1:0";
4242
String modelId = "ai21.jamba-1-5-large-v1:0";
43-
var prompt = new Prompt("Tell me a joke?", ChatOptionsBuilder.builder().withModel(modelId).build());
43+
var prompt = new Prompt("Tell me a joke?", ChatOptions.builder().model(modelId).build());
4444

4545
var chatModel = BedrockProxyChatModel.builder()
4646
.withCredentialsProvider(EnvironmentVariableCredentialsProvider.create())

models/spring-ai-huggingface/src/main/java/org/springframework/ai/huggingface/HuggingfaceChatModel.java

Lines changed: 0 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -27,8 +27,6 @@
2727
import org.springframework.ai.chat.model.ChatModel;
2828
import org.springframework.ai.chat.model.ChatResponse;
2929
import org.springframework.ai.chat.model.Generation;
30-
import org.springframework.ai.chat.prompt.ChatOptions;
31-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
3230
import org.springframework.ai.chat.prompt.Prompt;
3331
import org.springframework.ai.huggingface.api.TextGenerationInferenceApi;
3432
import org.springframework.ai.huggingface.invoker.ApiClient;
@@ -128,9 +126,4 @@ public void setMaxNewTokens(int maxNewTokens) {
128126
this.maxNewTokens = maxNewTokens;
129127
}
130128

131-
@Override
132-
public ChatOptions getDefaultOptions() {
133-
return ChatOptionsBuilder.builder().build();
134-
}
135-
136129
}

models/spring-ai-minimax/src/main/java/org/springframework/ai/minimax/MiniMaxChatModel.java

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -48,7 +48,6 @@
4848
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
4949
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
5050
import org.springframework.ai.chat.prompt.ChatOptions;
51-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
5251
import org.springframework.ai.chat.prompt.Prompt;
5352
import org.springframework.ai.minimax.api.MiniMaxApi;
5453
import org.springframework.ai.minimax.api.MiniMaxApi.ChatCompletion;
@@ -374,14 +373,14 @@ protected boolean isToolCall(Generation generation, Set<String> toolCallFinishRe
374373
}
375374

376375
private ChatOptions buildRequestOptions(ChatCompletionRequest request) {
377-
return ChatOptionsBuilder.builder()
378-
.withModel(request.model())
379-
.withFrequencyPenalty(request.frequencyPenalty())
380-
.withMaxTokens(request.maxTokens())
381-
.withPresencePenalty(request.presencePenalty())
382-
.withStopSequences(request.stop())
383-
.withTemperature(request.temperature())
384-
.withTopP(request.topP())
376+
return ChatOptions.builder()
377+
.model(request.model())
378+
.frequencyPenalty(request.frequencyPenalty())
379+
.maxTokens(request.maxTokens())
380+
.presencePenalty(request.presencePenalty())
381+
.stopSequences(request.stop())
382+
.temperature(request.temperature())
383+
.topP(request.topP())
385384
.build();
386385
}
387386

models/spring-ai-mistral-ai/src/main/java/org/springframework/ai/mistralai/MistralAiChatModel.java

Lines changed: 6 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -46,7 +46,6 @@
4646
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
4747
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
4848
import org.springframework.ai.chat.prompt.ChatOptions;
49-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
5049
import org.springframework.ai.chat.prompt.Prompt;
5150
import org.springframework.ai.mistralai.api.MistralAiApi;
5251
import org.springframework.ai.mistralai.api.MistralAiApi.ChatCompletion;
@@ -406,12 +405,12 @@ private List<MistralAiApi.FunctionTool> getFunctionTools(Set<String> functionNam
406405
}
407406

408407
private ChatOptions buildRequestOptions(MistralAiApi.ChatCompletionRequest request) {
409-
return ChatOptionsBuilder.builder()
410-
.withModel(request.model())
411-
.withMaxTokens(request.maxTokens())
412-
.withStopSequences(request.stop())
413-
.withTemperature(request.temperature())
414-
.withTopP(request.topP())
408+
return ChatOptions.builder()
409+
.model(request.model())
410+
.maxTokens(request.maxTokens())
411+
.stopSequences(request.stop())
412+
.temperature(request.temperature())
413+
.topP(request.topP())
415414
.build();
416415
}
417416

models/spring-ai-moonshot/src/main/java/org/springframework/ai/moonshot/MoonshotChatModel.java

Lines changed: 8 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -47,7 +47,6 @@
4747
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
4848
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
4949
import org.springframework.ai.chat.prompt.ChatOptions;
50-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
5150
import org.springframework.ai.chat.prompt.Prompt;
5251
import org.springframework.ai.model.ModelOptionsUtils;
5352
import org.springframework.ai.model.function.FunctionCallback;
@@ -420,14 +419,14 @@ else if (message.getMessageType() == MessageType.TOOL) {
420419
}
421420

422421
private ChatOptions buildRequestOptions(MoonshotApi.ChatCompletionRequest request) {
423-
return ChatOptionsBuilder.builder()
424-
.withModel(request.model())
425-
.withFrequencyPenalty(request.frequencyPenalty())
426-
.withMaxTokens(request.maxTokens())
427-
.withPresencePenalty(request.presencePenalty())
428-
.withStopSequences(request.stop())
429-
.withTemperature(request.temperature())
430-
.withTopP(request.topP())
422+
return ChatOptions.builder()
423+
.model(request.model())
424+
.frequencyPenalty(request.frequencyPenalty())
425+
.maxTokens(request.maxTokens())
426+
.presencePenalty(request.presencePenalty())
427+
.stopSequences(request.stop())
428+
.temperature(request.temperature())
429+
.topP(request.topP())
431430
.build();
432431
}
433432

models/spring-ai-ollama/src/main/java/org/springframework/ai/ollama/OllamaChatModel.java

Lines changed: 9 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -45,7 +45,6 @@
4545
import org.springframework.ai.chat.observation.ChatModelObservationDocumentation;
4646
import org.springframework.ai.chat.observation.DefaultChatModelObservationConvention;
4747
import org.springframework.ai.chat.prompt.ChatOptions;
48-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
4948
import org.springframework.ai.chat.prompt.Prompt;
5049
import org.springframework.ai.model.ModelOptionsUtils;
5150
import org.springframework.ai.model.function.FunctionCallback;
@@ -421,15 +420,15 @@ private List<ChatRequest.Tool> getFunctionTools(Set<String> functionNames) {
421420

422421
private ChatOptions buildRequestOptions(OllamaApi.ChatRequest request) {
423422
var options = ModelOptionsUtils.mapToClass(request.options(), OllamaOptions.class);
424-
return ChatOptionsBuilder.builder()
425-
.withModel(request.model())
426-
.withFrequencyPenalty(options.getFrequencyPenalty())
427-
.withMaxTokens(options.getMaxTokens())
428-
.withPresencePenalty(options.getPresencePenalty())
429-
.withStopSequences(options.getStopSequences())
430-
.withTemperature(options.getTemperature())
431-
.withTopK(options.getTopK())
432-
.withTopP(options.getTopP())
423+
return ChatOptions.builder()
424+
.model(request.model())
425+
.frequencyPenalty(options.getFrequencyPenalty())
426+
.maxTokens(options.getMaxTokens())
427+
.presencePenalty(options.getPresencePenalty())
428+
.stopSequences(options.getStopSequences())
429+
.temperature(options.getTemperature())
430+
.topK(options.getTopK())
431+
.topP(options.getTopP())
433432
.build();
434433
}
435434

models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelIT.java

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@
2929
import org.springframework.ai.chat.metadata.Usage;
3030
import org.springframework.ai.chat.model.ChatResponse;
3131
import org.springframework.ai.chat.model.Generation;
32-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
32+
import org.springframework.ai.chat.prompt.ChatOptions;
3333
import org.springframework.ai.chat.prompt.Prompt;
3434
import org.springframework.ai.chat.prompt.PromptTemplate;
3535
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
@@ -91,7 +91,7 @@ void roleTest() {
9191
UserMessage userMessage = new UserMessage("Tell me about 5 famous pirates from the Golden Age of Piracy.");
9292

9393
// portable/generic options
94-
var portableOptions = ChatOptionsBuilder.builder().withTemperature(0.7).build();
94+
var portableOptions = ChatOptions.builder().temperature(0.7).build();
9595

9696
Prompt prompt = new Prompt(List.of(systemMessage, userMessage), portableOptions);
9797

models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatRequestTests.java

Lines changed: 1 addition & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,6 @@
1919
import org.junit.jupiter.api.Test;
2020

2121
import org.springframework.ai.chat.prompt.ChatOptions;
22-
import org.springframework.ai.chat.prompt.ChatOptionsBuilder;
2322
import org.springframework.ai.chat.prompt.Prompt;
2423
import org.springframework.ai.ollama.api.OllamaApi;
2524
import org.springframework.ai.ollama.api.OllamaOptions;
@@ -78,11 +77,7 @@ public void createRequestWithPromptOllamaOptions() {
7877
public void createRequestWithPromptPortableChatOptions() {
7978

8079
// Ollama runtime options.
81-
ChatOptions portablePromptOptions = ChatOptionsBuilder.builder()
82-
.withTemperature(0.9)
83-
.withTopK(100)
84-
.withTopP(0.6)
85-
.build();
80+
ChatOptions portablePromptOptions = ChatOptions.builder().temperature(0.9).topK(100).topP(0.6).build();
8681

8782
var request = this.chatModel.ollamaChatRequest(new Prompt("Test message content", portablePromptOptions), true);
8883

0 commit comments

Comments
 (0)