3737/** Client for interacting with OpenAI models. */
3838@ Slf4j
3939@ RequiredArgsConstructor (access = AccessLevel .PRIVATE )
40- public final class OpenAiClient {
40+ public final class OpenAiClient implements OpenAiClientWithSystemPrompt {
// Azure OpenAI API version appended to requests when the caller does not override it.
private static final String DEFAULT_API_VERSION = "2024-02-01";

// Shared Jackson mapper for (de)serializing request/response payloads; package-visible for reuse.
static final ObjectMapper JACKSON = getDefaultObjectMapper();

// Optional system prompt, set via withSystemPrompt(String); null until then.
// NOTE(review): where this prompt is merged into outgoing requests is not visible in this
// chunk — presumably in chatCompletion(String); confirm against the full class.
@Nullable private String systemPrompt = null;
@@ -113,18 +113,12 @@ public static OpenAiClient withCustomDestination(@Nonnull final Destination dest
/**
 * Sets a system prompt on this client to be applied to subsequent chat completions.
 *
 * <p>Overwrites any previously set system prompt.
 *
 * @param systemPrompt the system prompt text
 * @return the client
 */
@Nonnull
public OpenAiClientWithSystemPrompt withSystemPrompt(@Nonnull final String systemPrompt) {
  this.systemPrompt = systemPrompt;
  // Fluent API: returns this instance, narrowed to the system-prompt-aware interface.
  return this;
}
120120
121- /**
122- * Generate a completion for the given user prompt.
123- *
124- * @param prompt a text message.
125- * @return the completion output
126- * @throws OpenAiClientException if the request fails
127- */
121+ @ Override
128122 @ Nonnull
129123 public OpenAiChatCompletionOutput chatCompletion (@ Nonnull final String prompt )
130124 throws OpenAiClientException {
@@ -146,35 +140,10 @@ public OpenAiChatCompletionOutput chatCompletion(@Nonnull final String prompt)
/**
 * Generate a chat completion for the given request parameters.
 *
 * <p>NOTE(review): a previously set {@code systemPrompt} is not merged into these parameters in
 * the visible code (the former {@code warnIfUnsupportedUsage()} warning was removed) — confirm
 * that callers are expected to include the system message in {@code parameters} themselves.
 *
 * @param parameters the full chat-completion request parameters
 * @return the completion output
 * @throws OpenAiClientException if the request fails
 */
@Nonnull
public OpenAiChatCompletionOutput chatCompletion(
    @Nonnull final OpenAiChatCompletionParameters parameters) throws OpenAiClientException {
  // Delegates to the generic execution path against the chat-completions endpoint.
  return execute("/chat/completions", parameters, OpenAiChatCompletionOutput.class);
}
152145
153- /**
154- * Stream a completion for the given prompt. Returns a <b>lazily</b> populated stream of text
155- * chunks. To access more details about the individual chunks, use {@link
156- * #streamChatCompletionDeltas(OpenAiChatCompletionParameters)}.
157- *
158- * <p>The stream should be consumed using a try-with-resources block to ensure that the underlying
159- * HTTP connection is closed.
160- *
161- * <p>Example:
162- *
163- * <pre>{@code
164- * try (var stream = client.streamChatCompletion("...")) {
165- * stream.forEach(System.out::println);
166- * }
167- * }</pre>
168- *
169- * <p>Please keep in mind that using a terminal stream operation like {@link Stream#forEach} will
170- * block until all chunks are consumed. Also, for obvious reasons, invoking {@link
171- * Stream#parallel()} on this stream is not supported.
172- *
173- * @param prompt a text message.
174- * @return A stream of message deltas
175- * @throws OpenAiClientException if the request fails or if the finish reason is content_filter
176- * @see #streamChatCompletionDeltas(OpenAiChatCompletionParameters)
177- */
146+ @ Override
178147 @ Nonnull
179148 public Stream <String > streamChatCompletion (@ Nonnull final String prompt )
180149 throws OpenAiClientException {
@@ -225,18 +194,10 @@ private static void throwOnContentFilter(@Nonnull final OpenAiChatCompletionDelt
/**
 * Stream a chat completion as a lazily populated stream of delta objects.
 *
 * <p>NOTE(review): the returned stream presumably wraps a live HTTP connection and should be
 * consumed in a try-with-resources block — confirm against {@code executeStream}'s contract,
 * which is not visible in this chunk.
 *
 * @param parameters the full chat-completion request parameters
 * @return a stream of message deltas
 * @throws OpenAiClientException if the request fails
 */
@Nonnull
public Stream<OpenAiChatCompletionDelta> streamChatCompletionDeltas(
    @Nonnull final OpenAiChatCompletionParameters parameters) throws OpenAiClientException {
  // Mutates the caller-supplied parameters in place to switch the request into streaming mode.
  parameters.enableStreaming();
  return executeStream("/chat/completions", parameters, OpenAiChatCompletionDelta.class);
}
232200
233- private void warnIfUnsupportedUsage () {
234- if (systemPrompt != null ) {
235- log .warn (
236- "Previously set messages will be ignored, set it as an argument of this method instead." );
237- }
238- }
239-
240201 /**
241202 * Get a vector representation of a given input that can be easily consumed by machine learning
242203 * models and algorithms.
0 commit comments