Commit f7b7717 (parent db75ca7)

Convert request and response classes to Value classes

6 files changed: 176 additions, 70 deletions

6 files changed

+176
-70
lines changed

foundation-models/openai/src/main/java/com/sap/ai/sdk/foundationmodels/openai/OpenAiChatCompletionDelta.java
(6 additions, 2 deletions)

@@ -1,5 +1,7 @@
 package com.sap.ai.sdk.foundationmodels.openai;
 
+import static lombok.AccessLevel.NONE;
+
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.annotations.Beta;
@@ -10,17 +12,19 @@
 import java.util.Objects;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import lombok.Data;
 import lombok.RequiredArgsConstructor;
+import lombok.Setter;
+import lombok.Value;
 
 /**
  * Represents an OpenAI chat completion output delta for streaming.
  *
  * @since 1.4.0
  */
 @Beta
-@Data
+@Value
 @RequiredArgsConstructor(onConstructor_ = @JsonCreator)
+@Setter(value = NONE)
 public class OpenAiChatCompletionDelta implements StreamedDelta {
   /** The original response from the chat completion stream. */
   @Nonnull final CreateChatCompletionStreamResponse originalResponse;
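
Editor's note, not part of the commit: with this annotation combination, Lombok's @Value makes the class final with private final fields and generated getters, the explicit @RequiredArgsConstructor(onConstructor_ = @JsonCreator) keeps the Jackson-creatable constructor, and @Setter(value = NONE) makes the absence of setters explicit. A minimal stand-in class using the same pattern:

    import static lombok.AccessLevel.NONE;

    import com.fasterxml.jackson.annotation.JsonCreator;
    import lombok.RequiredArgsConstructor;
    import lombok.Setter;
    import lombok.Value;

    // Stand-in type; OpenAiChatCompletionDelta follows the same pattern with its originalResponse field.
    @Value
    @RequiredArgsConstructor(onConstructor_ = @JsonCreator)
    @Setter(value = NONE)
    class ExampleDelta {
      // @Value makes this field private and final and generates getContent(); no setter is generated.
      String content;
    }

    class ExampleDeltaUsage {
      static String demo() {
        ExampleDelta delta = new ExampleDelta("chunk");
        return delta.getContent(); // read-only access; the instance is immutable
      }
    }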

foundation-models/openai/src/main/java/com/sap/ai/sdk/foundationmodels/openai/OpenAiChatCompletionRequest.java
(96 additions, 58 deletions)

@@ -14,85 +14,92 @@
 import java.util.Map;
 import javax.annotation.Nonnull;
 import javax.annotation.Nullable;
-import lombok.Data;
-import lombok.experimental.Accessors;
+import lombok.AccessLevel;
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+import lombok.Value;
+import lombok.With;
+import lombok.experimental.Tolerate;
 
 /**
  * Represents a request for OpenAI chat completion, including conversation messages and parameters.
  *
  * @since 1.4.0
  */
 @Beta
-@Accessors(fluent = true)
-@Data
+@Value
+@With
+@AllArgsConstructor(access = AccessLevel.PRIVATE)
+@Getter(value = AccessLevel.NONE)
 public class OpenAiChatCompletionRequest {
   /** List of messages from the conversation. */
-  @Nonnull private final List<OpenAiMessage> messages = new ArrayList<>();
+  @Nonnull List<OpenAiMessage> messages;
 
   /** Stop sequences for the completion. */
-  @Nullable private List<String> stop;
+  @Nullable List<String> stop;
 
   /** Temperature for the completion. */
-  @Nullable private BigDecimal temperature;
+  @Nullable BigDecimal temperature;
 
   /** Top-p sampling parameter. */
-  @Nullable private BigDecimal topP;
+  @Nullable BigDecimal topP;
 
   /** Whether to stream the completion. */
-  @Nullable private Boolean stream;
+  boolean stream;
 
   /** Maximum number of tokens for the completion. */
-  @Nullable private Integer maxTokens;
+  @Nullable Integer maxTokens;
 
   /** Maximum number of tokens for the completion response. */
-  @Nullable private Integer maxCompletionTokens;
+  @Nullable Integer maxCompletionTokens;
 
   /** Presence penalty for the completion. */
-  @Nullable private BigDecimal presencePenalty;
+  @Nullable BigDecimal presencePenalty;
 
   /** Frequency penalty for the completion. */
-  @Nullable private BigDecimal frequencyPenalty;
+  @Nullable BigDecimal frequencyPenalty;
 
   /** Logit bias for the completion. */
-  @Nullable private Map<String, Integer> logitBias;
+  @Nullable Map<String, Integer> logitBias;
 
   /** User identifier for the completion. */
-  @Nullable private String user;
+  @Nullable String user;
 
   /** Whether to include log probabilities in the response. */
-  @Nullable private Boolean logprobs;
+  boolean logprobs;
 
   /** Number of top log probabilities to include. */
-  @Nullable private Integer topLogprobs;
+  @Nullable Integer topLogprobs;
 
   /** Number of completions to generate. */
-  @Nullable private Integer n;
+  @Nullable Integer n;
 
   /** Whether to allow parallel tool calls. */
-  @Nullable private Boolean parallelToolCalls;
+  boolean parallelToolCalls;
 
   /** Seed for random number generation. */
-  @Nullable private Integer seed;
+  @Nullable Integer seed;
 
   /** Options for streaming the completion. */
-  @Nullable private ChatCompletionStreamOptions streamOptions;
+  @Nullable ChatCompletionStreamOptions streamOptions;
 
   /** Response format for the completion. */
-  @Nullable private CreateChatCompletionRequestAllOfResponseFormat responseFormat;
+  @Nullable CreateChatCompletionRequestAllOfResponseFormat responseFormat;
 
   /** List of tools for the completion. */
-  @Nullable private List<ChatCompletionTool> tools;
+  @Nullable List<ChatCompletionTool> tools;
 
   /** Tool choice option for the completion. */
-  @Nullable private ChatCompletionToolChoiceOption toolChoice;
+  @Nullable ChatCompletionToolChoiceOption toolChoice;
 
   /**
-   * Creates an OpenAiChatCompletionPrompt with a single message.
+   * Creates an OpenAiChatCompletionRequest with a string as the user message.
    *
    * @param message the message to be added to the prompt
   */
+  @Tolerate
   public OpenAiChatCompletionRequest(@Nonnull final String message) {
-    messages.add(OpenAiMessage.user(message));
+    this(OpenAiMessage.user(message));
   }
 
   /**
@@ -101,54 +108,85 @@ public OpenAiChatCompletionRequest(@Nonnull final String message) {
    * @param message the primary message to be added to the prompt
    * @param messages additional messages to be added to the prompt
    */
+  @Tolerate
   public OpenAiChatCompletionRequest(
       @Nonnull final OpenAiMessage message, @Nonnull final OpenAiMessage... messages) {
+    // Keeps the default values for the primitive boolean fields; Boolean fields combined with
+    // @With would introduce a comparison bug.
+    this(
+        new ArrayList<>(),
+        null,
+        null,
+        null,
+        false,
+        null,
+        null,
+        null,
+        null,
+        null,
+        null,
+        false,
+        null,
+        null,
+        true,
+        null,
+        null,
+        null,
+        null,
+        null);
+
     this.messages.add(message);
     this.messages.addAll(Arrays.asList(messages));
   }
 
   /**
-   * Sets the stop sequences for the prompt.
+   * Adds stop sequences to the request.
    *
-   * @param values the stop sequences to be set
-   * @return the current OpenAiChatCompletionPrompt instance
+   * @param sequence the primary stop sequence
+   * @param sequences additional stop sequences
+   * @return a new OpenAiChatCompletionRequest instance with the specified stop sequences
    */
+  @Tolerate
   @Nonnull
-  public OpenAiChatCompletionRequest stop(
-      @Nonnull final String value, @Nonnull final String... values) {
-    this.stop = new ArrayList<>();
+  public OpenAiChatCompletionRequest withStop(
+      @Nonnull final String sequence, @Nonnull final String... sequences) {
+    final var allSequences = new ArrayList<String>();
 
-    this.stop.add(value);
-    this.stop.addAll(Arrays.asList(values));
+    allSequences.add(sequence);
+    allSequences.addAll(Arrays.asList(sequences));
 
-    return this;
+    return this.withStop(allSequences);
   }
 
+  /**
+   * Converts the request into the generated model class CreateChatCompletionRequest.
+   *
+   * @return the CreateChatCompletionRequest
+   */
   CreateChatCompletionRequest toCreateChatCompletionRequest() {
     final var request = new CreateChatCompletionRequest();
-    this.messages().forEach(message -> request.addMessagesItem(message.createDTO()));
-
-    request.stop(
-        this.stop() != null ? CreateChatCompletionRequestAllOfStop.create(this.stop()) : null);
-
-    request.temperature(this.temperature());
-    request.topP(this.topP());
-    request.stream(this.stream());
-    request.maxTokens(this.maxTokens());
-    request.maxCompletionTokens(this.maxCompletionTokens());
-    request.presencePenalty(this.presencePenalty());
-    request.frequencyPenalty(this.frequencyPenalty());
-    request.logitBias(this.logitBias());
-    request.user(this.user());
-    request.logprobs(this.logprobs());
-    request.topLogprobs(this.topLogprobs());
-    request.n(this.n());
-    request.parallelToolCalls(this.parallelToolCalls());
-    request.seed(this.seed());
-    request.streamOptions(this.streamOptions());
-    request.responseFormat(this.responseFormat());
-    request.tools(this.tools());
-    request.toolChoice(this.toolChoice());
+    this.messages.forEach(message -> request.addMessagesItem(message.createDTO()));
+
+    request.stop(this.stop != null ? CreateChatCompletionRequestAllOfStop.create(this.stop) : null);
+
+    request.temperature(this.temperature);
+    request.topP(this.topP);
+
+    request.stream(this.stream);
+    request.maxTokens(this.maxTokens);
+    request.maxCompletionTokens(this.maxCompletionTokens);
+    request.presencePenalty(this.presencePenalty);
+    request.frequencyPenalty(this.frequencyPenalty);
+    request.logitBias(this.logitBias);
+    request.user(this.user);
+    request.logprobs(this.logprobs);
+    request.topLogprobs(this.topLogprobs);
+    request.n(this.n);
+    request.parallelToolCalls(this.parallelToolCalls);
+    request.seed(this.seed);
+    request.streamOptions(this.streamOptions);
+    request.responseFormat(this.responseFormat);
+    request.tools(this.tools);
+    request.toolChoice(this.toolChoice);
     request.functionCall(null);
     request.functions(null);
     return request;
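
Editor's sketch, not part of the diff: after this change every field gets an immutable with* copy method from @With, and the @Tolerate overloads shown above keep the convenience signatures. withTemperature and withMaxTokens do not appear in this diff but are assumed to be generated from the temperature and maxTokens fields.

    import com.sap.ai.sdk.foundationmodels.openai.OpenAiChatCompletionRequest;
    import java.math.BigDecimal;

    class RequestUsageSketch {
      static OpenAiChatCompletionRequest buildRequest() {
        // Each with* call returns a new request instance instead of mutating the original.
        return new OpenAiChatCompletionRequest("Hello World! Why is this phrase so famous?")
            .withTemperature(BigDecimal.valueOf(0.2)) // assumed @With accessor for the temperature field
            .withMaxTokens(256)                       // assumed @With accessor for the maxTokens field
            .withStop("END");                         // @Tolerate overload from the diff above
      }
    }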

foundation-models/openai/src/main/java/com/sap/ai/sdk/foundationmodels/openai/OpenAiChatCompletionResponse.java
(5 additions, 2 deletions)

@@ -1,6 +1,7 @@
 package com.sap.ai.sdk.foundationmodels.openai;
 
 import static com.sap.ai.sdk.foundationmodels.openai.generated.model.CreateChatCompletionResponseChoicesInner.FinishReasonEnum.CONTENT_FILTER;
+import static lombok.AccessLevel.NONE;
 import static lombok.AccessLevel.PACKAGE;
 
 import com.google.common.annotations.Beta;
@@ -9,17 +10,19 @@
 import com.sap.ai.sdk.foundationmodels.openai.generated.model.CreateChatCompletionResponseChoicesInner;
 import java.util.Objects;
 import javax.annotation.Nonnull;
-import lombok.Data;
 import lombok.RequiredArgsConstructor;
+import lombok.Setter;
+import lombok.Value;
 
 /**
  * Represents the output of an OpenAI chat completion. *
  *
  * @since 1.4.0
  */
 @Beta
-@Data
+@Value
 @RequiredArgsConstructor(access = PACKAGE)
+@Setter(value = NONE)
 public class OpenAiChatCompletionResponse {
   /** The original response from the OpenAI API. */
   @Nonnull final CreateChatCompletionResponse originalResponse;
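
As with the delta class above, @Value turns the response into an immutable value object. A short read-only access sketch (editor's note; getOriginalResponse() is the accessor the tests below rely on, the surrounding class is illustrative only):

    import com.sap.ai.sdk.foundationmodels.openai.OpenAiChatCompletionResponse;
    import com.sap.ai.sdk.foundationmodels.openai.generated.model.CreateChatCompletionResponse;

    class ResponseUsageSketch {
      // @Value generates getOriginalResponse() for the originalResponse field; no setter exists.
      static CreateChatCompletionResponse rawResponse(OpenAiChatCompletionResponse response) {
        return response.getOriginalResponse();
      }
    }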

foundation-models/openai/src/test/java/com/sap/ai/sdk/foundationmodels/openai/NewOpenAiClientTest.java
(18 additions, 6 deletions)

@@ -181,13 +181,16 @@ void chatCompletion() {
             equalToJson(
                 """
                 {
+                  "stream": false,
                   "messages" : [ {
                     "content" : "You are a helpful AI",
                     "role" : "system"
                   }, {
                     "content" : "Hello World! Why is this phrase so famous?",
                     "role" : "user"
-                  } ]
+                  } ],
+                  "logprobs" : false,
+                  "parallel_tool_calls" : true
                 }""")));
   }
 
@@ -210,10 +213,13 @@ void history() {
             equalToJson(
                 """
                 {
+                  "stream" : false,
                   "messages" : [{
                     "content" : "First message",
                     "role" : "user"
-                  } ]
+                  }],
+                  "logprobs" : false,
+                  "parallel_tool_calls" : true
                 }""")));
 
     var response = client.chatCompletion(new OpenAiChatCompletionRequest("Second message"));
@@ -230,10 +236,13 @@ void history() {
             equalToJson(
                 """
                 {
+                  "stream" : false,
                   "messages" : [{
                     "content" : "Second message",
                     "role" : "user"
-                  } ]
+                  }],
+                  "logprobs" : false,
+                  "parallel_tool_calls" : true
                 }""")));
   }
 
@@ -500,8 +509,8 @@ void chatCompletionTool() {
     final var request =
         new OpenAiChatCompletionRequest(
                 "A pair of rabbits is placed in a field. Each month, every pair produces one new pair, starting from the second month. How many rabbits will there be after 12 months?")
-            .tools(List.of(tool))
-            .toolChoice(toolChoice);
+            .withTools(List.of(tool))
+            .withToolChoice(toolChoice);
 
     var response = client.chatCompletion(request).getOriginalResponse();
 
@@ -535,6 +544,7 @@ void chatCompletionTool() {
             equalToJson(
                 """
                 {
+                  "stream" : false,
                   "messages" : [ {
                     "content" : "A pair of rabbits is placed in a field. Each month, every pair produces one new pair, starting from the second month. How many rabbits will there be after 12 months?",
                     "role" : "user"
@@ -559,7 +569,9 @@ void chatCompletionTool() {
                     "function" : {
                       "name" : "fibonacci"
                     }
-                  }
+                  },
+                  "logprobs" : false,
+                  "parallel_tool_calls" : true
                 }
                 """)));
   }
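
Why the expected JSON in these tests now always contains "stream", "logprobs", and "parallel_tool_calls": those request fields are primitive booleans after this commit, so toCreateChatCompletionRequest() always passes explicit values for them, while the wrapper-typed fields remain omitted when unset. A minimal, hypothetical Jackson sketch of that mechanism (the Payload class below is illustrative only; the real request serializes through the generated CreateChatCompletionRequest model, which uses snake_case property names):

    import com.fasterxml.jackson.annotation.JsonInclude;
    import com.fasterxml.jackson.databind.ObjectMapper;

    class SerializationSketch {
      @JsonInclude(JsonInclude.Include.NON_NULL)
      static class Payload {
        public boolean stream;                    // primitive: always serialized, defaults to false
        public boolean logprobs;                  // primitive: always serialized, defaults to false
        public boolean parallelToolCalls = true;  // primitive with a true default
        public Integer maxTokens;                 // wrapper: omitted while null
      }

      public static void main(String[] args) throws Exception {
        // Prints {"stream":false,"logprobs":false,"parallelToolCalls":true}
        System.out.println(new ObjectMapper().writeValueAsString(new Payload()));
      }
    }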
