Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
49 commits
Select commit Hold shift + click to select a range
c7da29d
Init
a-d Jan 8, 2025
f7fd3f8
Merge remote-tracking branch 'origin/main' into generated-openai
a-d Jan 8, 2025
1072172
Fix compilation; Minor test changes
a-d Jan 8, 2025
8554091
Minor fixes
a-d Jan 9, 2025
483db01
Formatting
bot-sdk-js Jan 9, 2025
bbc0146
Fixes
a-d Jan 9, 2025
433d84a
Add static code check exclusions
a-d Jan 9, 2025
4bc431b
Format; Remove redundant files; Fix test
a-d Jan 9, 2025
d8dd4c0
Merge remote-tracking branch 'origin/generated-openai' into generated…
a-d Jan 9, 2025
0620966
Change tests; Change threshold
a-d Jan 10, 2025
cc7e833
Fix sample application; Improve code quality; Format
a-d Jan 10, 2025
753cbc4
Formatting
bot-sdk-js Jan 10, 2025
8df9ef8
Merge branch 'main' into generated-openai
newtork Jan 13, 2025
c5d45b8
Fix unit test
a-d Jan 13, 2025
7db4500
Merge remote-tracking branch 'origin/main' into generated-openai
a-d Jan 15, 2025
4ebc2a3
Formatting
bot-sdk-js Jan 15, 2025
0a24719
move jackson config to static initializer
rpanackal Jan 20, 2025
e59b6fa
Remove necessary mixins
rpanackal Jan 20, 2025
4fae0fc
Add convenience api similar to orchestration (wip)
rpanackal Jan 21, 2025
38c972a
Manual change on StreamResponse's enum
rpanackal Jan 21, 2025
eb4a6f6
OpenAiChatCompletionDelta now contains CreateChatCompletionStreamResp…
rpanackal Jan 21, 2025
b67d811
Merge remote-tracking branch 'refs/remotes/origin/main' into chore/st…
rpanackal Jan 23, 2025
e83d5e9
Introduce convenience request, response and error object
rpanackal Jan 27, 2025
697f9bf
Merge remote-tracking branch 'refs/remotes/origin/main' into chore/st…
rpanackal Jan 27, 2025
0973197
Formatting
bot-sdk-js Jan 27, 2025
ff7eeb4
Minimize constructors exposed for response class
rpanackal Jan 27, 2025
a3f972b
Merge remote-tracking branch 'origin/chore/stable-conv-generated-open…
rpanackal Jan 27, 2025
a85c1db
Change call chaining in chat completion
rpanackal Jan 27, 2025
985dc4d
Add convenience for prompt and config
rpanackal Jan 30, 2025
77ce51d
Minimize TODOs
rpanackal Jan 30, 2025
46a3817
Remove main merge for easy review
rpanackal Jan 31, 2025
82aacd9
Formatting
bot-sdk-js Jan 31, 2025
6bafedb
Include embedding convenience
rpanackal Jan 31, 2025
35859c0
Include embedding convenience
rpanackal Jan 31, 2025
13b223c
Change OpenAiChatCompletionOutput access and OpenAiChatCompletionProm…
rpanackal Jan 31, 2025
a1e6323
Make fields final where @Value is now @Data
rpanackal Jan 31, 2025
1c632e1
Merge remote-tracking branch 'origin/chore/stable-conv-generated-open…
rpanackal Jan 31, 2025
e637ab6
PMD suggestions
rpanackal Jan 31, 2025
9c52ab6
Merge remote-tracking branch 'refs/remotes/origin/main' into chore/st…
rpanackal Jan 31, 2025
b412d96
Fix error in low level api usage (Sample app)
rpanackal Jan 31, 2025
b9c6052
Manually add Beta annotation to generated code
a-d Jan 31, 2025
5f8d64b
Merge remote-tracking branch 'origin/chore/stable-conv-generated-open…
a-d Jan 31, 2025
9b7ad42
Revert embedding convenience.
rpanackal Feb 3, 2025
b946c5c
Move from static factory to constructor based initialization in OpenA…
rpanackal Feb 3, 2025
334ed8b
Combine prompt and config class into request class
rpanackal Feb 3, 2025
f949fb2
Include deprecated code with tests
rpanackal Feb 5, 2025
7fef719
Minor cosmetic fix
rpanackal Feb 5, 2025
0547ce6
low-level chatCompletion returns low-level response and change OpenAi…
rpanackal Feb 5, 2025
666f4ef
Merge remote-tracking branch 'refs/remotes/origin/main' into chore/st…
rpanackal Feb 6, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 3 additions & 0 deletions .pipeline/checkstyle-suppressions.xml
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,10 @@
<!-- Suppress generated clients -->
<suppress files="[/\\]core[/\\]client[/\\]" checks=".*"/>
<suppress files="[/\\]core[/\\]model[/\\]" checks=".*"/>
<suppress files="[/\\]openai[/\\]model2[/\\]" checks=".*"/>
<suppress files="[/\\]orchestration[/\\]model[/\\]" checks=".*"/>
<!-- Suppress TODOs -->
<suppress files="OpenAiChatMessage.java" checks="TodoComment" lines="257,7" />
<suppress files="ChatCompletionResponseMessage.java" checks="TodoComment" lines="53,34" />
<suppress files="CreateChatCompletionRequest.java" checks="TodoComment" lines="73,47" />
</suppressions>
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
package com.sap.ai.sdk.foundationmodels.openai;

import com.fasterxml.jackson.annotation.JsonTypeInfo;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionStreamResponse;
import lombok.AccessLevel;
import lombok.NoArgsConstructor;

/**
 * Container for Jackson mix-in interfaces used to adjust (de)serialization of generated model
 * classes without editing the generated code itself.
 */
final class JacksonMixins {

  /** Pure container of nested mix-in types; never instantiated. */
  private JacksonMixins() {}

  /**
   * Forces the polymorphic stream-response type to deserialize directly as {@link
   * CreateChatCompletionStreamResponse}, bypassing type-id resolution.
   */
  @JsonTypeInfo(use = JsonTypeInfo.Id.NONE)
  @JsonDeserialize(as = CreateChatCompletionStreamResponse.class)
  public interface DefaultChatCompletionCreate200ResponseMixIn {}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
package com.sap.ai.sdk.foundationmodels.openai;

import com.sap.ai.sdk.foundationmodels.openai.model2.ChatCompletionRequestAssistantMessage;
import com.sap.ai.sdk.foundationmodels.openai.model2.ChatCompletionRequestAssistantMessageContent;
import javax.annotation.Nonnull;
import lombok.Value;
import lombok.experimental.Accessors;

/** Represents a chat message as 'assistant' to OpenAI service. */
@Value
@Accessors(fluent = true)
class OpenAiAssistantMessage implements OpenAiMessage {

/** The role of the message. */
@Nonnull String role = "assistant";

/** The content of the message. */
@Nonnull String content;

/**
* Converts the message to a serializable object.
*
* @return the corresponding {@code ChatCompletionRequestAssistantMessage} object.
*/
@Nonnull
public ChatCompletionRequestAssistantMessage createDTO() {
Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

(Comment/Minor)

I understand you are mirroring the Orchestration API design, as suggested. However, I would like to challenge this method (visibility) here. We don't really want our convenience-users to access this method. While I can't challenge it in orchestration anymore (due to stability), maybe we can explore other options here in the new API.

Can be done in a separate PR / ticket.

return new ChatCompletionRequestAssistantMessage()
.role(ChatCompletionRequestAssistantMessage.RoleEnum.fromValue(role()))
.content(ChatCompletionRequestAssistantMessageContent.create(content));
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,74 @@
package com.sap.ai.sdk.foundationmodels.openai;

import com.fasterxml.jackson.annotation.JsonCreator;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.annotations.Beta;
import com.sap.ai.sdk.core.common.StreamedDelta;
import com.sap.ai.sdk.foundationmodels.openai.model2.CompletionUsage;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionStreamResponse;
import java.util.Map;
import java.util.Objects;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.Data;
import lombok.RequiredArgsConstructor;

/**
 * Represents an OpenAI chat completion output delta for streaming.
 *
 * @since 1.3.0
 */
@Beta
@Data
@RequiredArgsConstructor(onConstructor_ = @JsonCreator)
public class OpenAiChatCompletionDelta implements StreamedDelta {
  /** The original response from the chat completion stream. */
  @Nonnull final CreateChatCompletionStreamResponse originalResponse;

  /**
   * Retrieves the delta content from the original response.
   *
   * <p>Only the choice at index 0 is considered; any other choice yields an empty string.
   *
   * @return The delta content as a string, or an empty string if not available.
   */
  @Nonnull
  @Override
  public String getDeltaContent() {
    final var choices = getOriginalResponse().getChoices();
    // Guard: no choices, or the first entry does not carry index 0.
    if (choices.isEmpty() || choices.get(0).getIndex() != 0) {
      return "";
    }
    final var content = choices.get(0).getDelta().getContent();
    return content == null ? "" : content;
  }

  /**
   * Retrieves the finish reason from the original response.
   *
   * @return The finish reason as a string, or null if not available.
   */
  @Nullable
  @Override
  public String getFinishReason() {
    final var choices = getOriginalResponse().getChoices();
    if (choices.isEmpty()) {
      return null;
    }
    final var reason = choices.get(0).getFinishReason();
    return reason == null ? null : reason.getValue();
  }

  /**
   * Retrieves the completion usage from the response, or null if it is not available.
   *
   * @param objectMapper The object mapper to use for conversion.
   * @return The completion usage or null.
   */
  @Nullable
  public CompletionUsage getCompletionUsage(@Nonnull final ObjectMapper objectMapper) {
    final var response = getOriginalResponse();
    // "usage" arrives as an untyped custom field on the streamed payload; convert it explicitly.
    final Object rawUsage =
        response.getCustomFieldNames().contains("usage") ? response.getCustomField("usage") : null;
    return rawUsage instanceof Map<?, ?> usageMap
        ? objectMapper.convertValue(usageMap, CompletionUsage.class)
        : null;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,152 @@
package com.sap.ai.sdk.foundationmodels.openai;

import com.sap.ai.sdk.foundationmodels.openai.model2.ChatCompletionStreamOptions;
import com.sap.ai.sdk.foundationmodels.openai.model2.ChatCompletionTool;
import com.sap.ai.sdk.foundationmodels.openai.model2.ChatCompletionToolChoiceOption;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionRequest;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionRequestAllOfResponseFormat;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionRequestAllOfStop;
import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import lombok.Data;
import lombok.experimental.Accessors;

/**
 * Represents a request for OpenAI chat completion, including conversation messages and parameters.
 */
@Accessors(fluent = true)
@Data
public class OpenAiChatCompletionRequest {
  /** List of messages from the conversation. */
  @Nonnull private final List<OpenAiMessage> messages = new ArrayList<>();

  /** Stop sequences for the completion. */
  @Nullable private List<String> stop;

  /** Temperature for the completion. */
  @Nullable private BigDecimal temperature;

  /** Top-p sampling parameter. */
  @Nullable private BigDecimal topP;

  /** Whether to stream the completion. */
  @Nullable private Boolean stream;

  /** Maximum number of tokens for the completion. */
  @Nullable private Integer maxTokens;

  /** Maximum number of tokens for the completion response. */
  @Nullable private Integer maxCompletionTokens;

  /** Presence penalty for the completion. */
  @Nullable private BigDecimal presencePenalty;

  /** Frequency penalty for the completion. */
  @Nullable private BigDecimal frequencyPenalty;

  /** Logit bias for the completion. */
  @Nullable private Map<String, Integer> logitBias;

  /** User identifier for the completion. */
  @Nullable private String user;

  /** Whether to include log probabilities in the response. */
  @Nullable private Boolean logprobs;

  /** Number of top log probabilities to include. */
  @Nullable private Integer topLogprobs;

  /** Number of completions to generate. */
  @Nullable private Integer n;

  /** Whether to allow parallel tool calls. */
  @Nullable private Boolean parallelToolCalls;

  /** Seed for random number generation. */
  @Nullable private Integer seed;

  /** Options for streaming the completion. */
  @Nullable private ChatCompletionStreamOptions streamOptions;

  /** Response format for the completion. */
  @Nullable private CreateChatCompletionRequestAllOfResponseFormat responseFormat;

  /** List of tools for the completion. */
  @Nullable private List<ChatCompletionTool> tools;

  /** Tool choice option for the completion. */
  @Nullable private ChatCompletionToolChoiceOption toolChoice;

  /**
   * Creates an OpenAiChatCompletionRequest with a single user message.
   *
   * @param message the message to be added to the request
   */
  public OpenAiChatCompletionRequest(@Nonnull final String message) {
    messages.add(OpenAiMessage.user(message));
  }

  /**
   * Creates an OpenAiChatCompletionRequest with multiple messages.
   *
   * @param message the primary message to be added to the request
   * @param messages additional messages to be added to the request
   */
  public OpenAiChatCompletionRequest(
      @Nonnull final OpenAiMessage message, @Nonnull final OpenAiMessage... messages) {
    this.messages.add(message);
    this.messages.addAll(Arrays.asList(messages));
  }

  /**
   * Sets the stop sequences for the request, replacing any previously set sequences.
   *
   * @param value the first stop sequence to be set
   * @param values additional stop sequences to be set
   * @return the current OpenAiChatCompletionRequest instance
   */
  @Nonnull
  public OpenAiChatCompletionRequest stop(
      @Nonnull final String value, @Nonnull final String... values) {
    this.stop = new ArrayList<>();

    this.stop.add(value);
    this.stop.addAll(Arrays.asList(values));

    return this;
  }

  /**
   * Converts this convenience request into the generated low-level {@code
   * CreateChatCompletionRequest} DTO, copying all messages and parameters.
   *
   * @return the populated DTO.
   */
  CreateChatCompletionRequest toCreateChatCompletionRequest() {
    final var request = new CreateChatCompletionRequest();
    this.messages().forEach(message -> request.addMessagesItem(message.createDTO()));

    // "stop" needs wrapping into the generated union type; pass null through unchanged.
    request.stop(
        this.stop() != null ? CreateChatCompletionRequestAllOfStop.create(this.stop()) : null);

    request.temperature(this.temperature());
    request.topP(this.topP());
    request.stream(this.stream());
    request.maxTokens(this.maxTokens());
    request.maxCompletionTokens(this.maxCompletionTokens());
    request.presencePenalty(this.presencePenalty());
    request.frequencyPenalty(this.frequencyPenalty());
    request.logitBias(this.logitBias());
    request.user(this.user());
    request.logprobs(this.logprobs());
    request.topLogprobs(this.topLogprobs());
    request.n(this.n());
    request.parallelToolCalls(this.parallelToolCalls());
    request.seed(this.seed());
    request.streamOptions(this.streamOptions());
    request.responseFormat(this.responseFormat());
    request.tools(this.tools());
    request.toolChoice(this.toolChoice());
    // Legacy function-calling fields are not exposed by this convenience API; unset explicitly.
    request.functionCall(null);
    request.functions(null);
    return request;
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,55 @@
package com.sap.ai.sdk.foundationmodels.openai;

import static com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionResponseChoicesInner.FinishReasonEnum.CONTENT_FILTER;
import static lombok.AccessLevel.PACKAGE;

import com.sap.ai.sdk.foundationmodels.openai.model2.CompletionUsage;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionResponse;
import com.sap.ai.sdk.foundationmodels.openai.model2.CreateChatCompletionResponseChoicesInner;
import java.util.Objects;
import javax.annotation.Nonnull;
import lombok.Data;
import lombok.RequiredArgsConstructor;

/** Represents the output of an OpenAI chat completion. */
@Data
@RequiredArgsConstructor(access = PACKAGE)
public class OpenAiChatCompletionResponse {
  /** The original response from the OpenAI API. */
  @Nonnull final CreateChatCompletionResponse originalResponse;

  /**
   * Gets the token usage from the original response.
   *
   * @return the token usage
   */
  @Nonnull
  public CompletionUsage getTokenUsage() {
    return getOriginalResponse().getUsage();
  }

  /**
   * Gets the first choice from the original response.
   *
   * <p>NOTE(review): assumes the response contains at least one choice — an empty choices list
   * would raise an {@link IndexOutOfBoundsException}; confirm this cannot occur.
   *
   * @return the first choice
   */
  @Nonnull
  public CreateChatCompletionResponseChoicesInner getChoice() {
    return getOriginalResponse().getChoices().get(0);
  }

  /**
   * Gets the content of the first choice.
   *
   * @return the content of the first choice, or an empty string if the message has no content
   * @throws OpenAiClientException if the content is filtered by the content filter
   */
  @Nonnull
  public String getContent() {
    // Reuse getChoice() instead of re-reading choices.get(0) for consistency with the accessor.
    if (CONTENT_FILTER.equals(getChoice().getFinishReason())) {
      throw new OpenAiClientException("Content filter filtered the output.");
    }

    return Objects.requireNonNullElse(getChoice().getMessage().getContent(), "");
  }
}
Loading