diff --git a/.release-please-manifest.json b/.release-please-manifest.json
index 56b9e502d..b4e9013ba 100644
--- a/.release-please-manifest.json
+++ b/.release-please-manifest.json
@@ -1,3 +1,3 @@
{
- ".": "0.15.1"
+ ".": "0.16.0"
}
\ No newline at end of file
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 0bef6d132..6721301d1 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,24 @@
# Changelog
+## 0.16.0 (2025-01-28)
+
+Full Changelog: [v0.15.1...v0.16.0](https://github.com/openai/openai-java/compare/v0.15.1...v0.16.0)
+
+### Features
+
+* **client:** add some more builder helpers ([#173](https://github.com/openai/openai-java/issues/173)) ([f314e68](https://github.com/openai/openai-java/commit/f314e68915560703970c9acc16e96030202de9d1))
+
+
+### Chores
+
+* **internal:** shorten `model` method impl ([f314e68](https://github.com/openai/openai-java/commit/f314e68915560703970c9acc16e96030202de9d1))
+
+
+### Documentation
+
+* mark more methods as deprecated ([f314e68](https://github.com/openai/openai-java/commit/f314e68915560703970c9acc16e96030202de9d1))
+* more examples and cleanup ([#159](https://github.com/openai/openai-java/issues/159)) ([fa2a1fc](https://github.com/openai/openai-java/commit/fa2a1fc2983131bac40988049dd3b0e7c730442c))
+
## 0.15.1 (2025-01-27)
Full Changelog: [v0.15.0...v0.15.1](https://github.com/openai/openai-java/compare/v0.15.0...v0.15.1)
diff --git a/README.md b/README.md
index 1aea4b99c..b83e5ab3b 100644
--- a/README.md
+++ b/README.md
@@ -9,8 +9,8 @@
-[](https://central.sonatype.com/artifact/com.openai/openai-java/0.15.1)
-[](https://javadoc.io/doc/com.openai/openai-java/0.15.1)
+[](https://central.sonatype.com/artifact/com.openai/openai-java/0.16.0)
+[](https://javadoc.io/doc/com.openai/openai-java/0.16.0)
@@ -25,7 +25,7 @@ The REST API documentation can be found on [platform.openai.com](https://platfor
### Gradle
```kotlin
-implementation("com.openai:openai-java:0.15.1")
+implementation("com.openai:openai-java:0.16.0")
```
### Maven
@@ -34,7 +34,7 @@ implementation("com.openai:openai-java:0.15.1")
<dependency>
<groupId>com.openai</groupId>
<artifactId>openai-java</artifactId>
- <version>0.15.1</version>
+ <version>0.16.0</version>
</dependency>
```
@@ -46,6 +46,8 @@ This library requires Java 8 or later.
## Usage
+See the [`openai-java-example`](openai-java-example/src/main/java/com/openai/example) directory for complete and runnable examples.
+
### Configure the client
Use `OpenAIOkHttpClient.builder()` to configure the client. At a minimum you need to set `.apiKey()`:
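As a minimal sketch of that configuration (assuming the standard `OPENAI_API_KEY` environment variable; the `fromEnv()` variant mirrors the examples added later in this change):

```java
import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;

// Minimal sketch: set the API key explicitly on the builder.
OpenAIClient client = OpenAIOkHttpClient.builder()
        .apiKey(System.getenv("OPENAI_API_KEY"))
        .build();

// Or configure the client entirely from environment variables.
OpenAIClient clientFromEnv = OpenAIOkHttpClient.fromEnv();
```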
@@ -290,41 +292,19 @@ This library throws exceptions in a single hierarchy for easy handling:
## Microsoft Azure OpenAI
To use this library with [Azure OpenAI](https://learn.microsoft.com/azure/ai-services/openai/overview), use the same
-OpenAI client builder but with the Azure-specific configuration.
+OpenAI client builder but with the Azure-specific configuration.
```java
-OpenAIOkHttpClient.Builder clientBuilder = OpenAIOkHttpClient.builder();
-
-/* Azure-specific code starts here */
-// You can either set 'endpoint' directly in the builder.
-// or set the env var "AZURE_OPENAI_ENDPOINT" and use fromEnv() method instead
-clientBuilder
- .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
- .credential(BearerTokenCredential.create(
- AuthenticationUtil.getBearerTokenSupplier(
- new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")
- ));
-/* Azure-specific code ends here */
-
-OpenAIClient client = clientBuilder.build();
-
-ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
- .addMessage(ChatCompletionMessageParam.ofChatCompletionUserMessageParam(
- ChatCompletionUserMessageParam.builder()
- .content(ChatCompletionUserMessageParam.Content.ofTextContent("Who won the world series in 2020?"))
- .build()))
- .model("gpt-4o")
- .build();
-
-ChatCompletion chatCompletion = client.chat().completions().create(params);
-
-List<ChatCompletion.Choice> choices = chatCompletion.choices();
-for (ChatCompletion.Choice choice : choices) {
- System.out.println("Choice content: " + choice.message().content().get());
-}
+OpenAIClient client = OpenAIOkHttpClient.builder()
+ // Configures the client from the `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ .fromEnv()
+ // Set the Azure Entra ID credential
+ .credential(BearerTokenCredential.create(AuthenticationUtil.getBearerTokenSupplier(
+ new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")))
+ .build();
```
-See the complete Azure OpenAI examples in the [Azure OpenAI example](https://github.com/openai/openai-java/tree/next/openai-azure-java-example/src/main/java/com.openai.azure.examples).
+See the complete Azure OpenAI example in the [`openai-java-example`](openai-java-example/src/main/java/com/openai/example/AzureEntraIdExample.java) directory. The other examples in the directory also work with Azure as long as the client is configured to use it.
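For instance, pointing the client at Azure with API-key authentication (rather than Entra ID) is enough to reuse the other examples. A sketch of that configuration, mirroring the removed `AzureApiKeyExample` and assuming the `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables are set:

```java
import com.openai.azure.credential.AzureApiKeyCredential;
import com.openai.client.OpenAIClient;
import com.openai.client.okhttp.OpenAIOkHttpClient;

// Sketch: target an Azure OpenAI resource and authenticate with an API key
// instead of Entra ID (mirrors the removed AzureApiKeyExample).
OpenAIClient client = OpenAIOkHttpClient.builder()
        .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
        .credential(AzureApiKeyCredential.create(System.getenv("AZURE_OPENAI_KEY")))
        .build();
```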
## Network options
diff --git a/build.gradle.kts b/build.gradle.kts
index acb2afc73..a19523d92 100644
--- a/build.gradle.kts
+++ b/build.gradle.kts
@@ -8,7 +8,7 @@ repositories {
allprojects {
group = "com.openai"
- version = "0.15.1" // x-release-please-version
+ version = "0.16.0" // x-release-please-version
}
subprojects {
diff --git a/examples/.keep b/examples/.keep
deleted file mode 100644
index d8c73e937..000000000
--- a/examples/.keep
+++ /dev/null
@@ -1,4 +0,0 @@
-File generated from our OpenAPI spec by Stainless.
-
-This directory can be used to store example files demonstrating usage of this SDK.
-It is ignored by Stainless code generation and its content (other than this keep file) won't be touched.
\ No newline at end of file
diff --git a/openai-azure-java-example/build.gradle.kts b/openai-azure-java-example/build.gradle.kts
deleted file mode 100644
index 1993f437e..000000000
--- a/openai-azure-java-example/build.gradle.kts
+++ /dev/null
@@ -1,12 +0,0 @@
-plugins {
- `java-library`
-}
-
-repositories {
- gradlePluginPortal()
-}
-
-dependencies {
- api(project(":openai-java-client-okhttp"))
- api("com.azure:azure-identity:1.14.0")
-}
diff --git a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExample.java b/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExample.java
deleted file mode 100644
index 5ebe02832..000000000
--- a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExample.java
+++ /dev/null
@@ -1,51 +0,0 @@
-package com.openai.azure.examples;
-
-import com.openai.azure.credential.AzureApiKeyCredential;
-import com.openai.client.OpenAIClient;
-import com.openai.client.okhttp.OpenAIOkHttpClient;
-import com.openai.core.JsonValue;
-import com.openai.models.ChatCompletion;
-import com.openai.models.ChatCompletionCreateParams;
-import com.openai.models.ChatCompletionMessageParam;
-import com.openai.models.ChatCompletionUserMessageParam;
-import java.util.List;
-
-/**
- * This example demonstrates how to use the Azure API key to authenticate with the OpenAI API.
- */
-public final class AzureApiKeyExample {
- private AzureApiKeyExample() {}
-
- public static void main(String[] args) {
- OpenAIOkHttpClient.Builder clientBuilder = OpenAIOkHttpClient.builder();
-
- /* Azure-specific code starts here */
- // You can either set 'endpoint' or 'apiKey' directly in the builder.
- // or set same two env vars and use fromEnv() method instead
- clientBuilder
- .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
- .credential(AzureApiKeyCredential.create(System.getenv("AZURE_OPENAI_KEY")));
- /* Azure-specific code ends here */
-
- // All code from this line down is general-purpose OpenAI code
- OpenAIClient client = clientBuilder.build();
-
- ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
- .addMessage(
- ChatCompletionUserMessageParam.builder()
- .content("Who won the world series in 2020?")
- .build())
- .model("gpt-4o")
- .build();
-
- ChatCompletion chatCompletion = client.chat().completions().create(params);
-
- List<ChatCompletion.Choice> choices = chatCompletion.choices();
- for (ChatCompletion.Choice choice : choices) {
- System.out.println("Choice content: " + choice.message().content().get());
- }
-
- JsonValue filterResult = chatCompletion._additionalProperties().get("prompt_filter_results");
- System.out.println("Content filter results: " + filterResult);
- }
-}
diff --git a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExampleAsync.java b/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExampleAsync.java
deleted file mode 100644
index c567697ef..000000000
--- a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureApiKeyExampleAsync.java
+++ /dev/null
@@ -1,53 +0,0 @@
-package com.openai.azure.examples;
-
-import com.openai.azure.credential.AzureApiKeyCredential;
-import com.openai.client.OpenAIClientAsync;
-import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
-import com.openai.core.JsonValue;
-import com.openai.models.ChatCompletion;
-import com.openai.models.ChatCompletionCreateParams;
-import com.openai.models.ChatCompletionMessageParam;
-import com.openai.models.ChatCompletionUserMessageParam;
-import java.util.List;
-
-/**
- * This example demonstrates how to use the Azure API key to authenticate with the OpenAI API, asynchronously.
- */
-public final class AzureApiKeyExampleAsync {
-
- private AzureApiKeyExampleAsync() {}
-
- public static void main(String[] args) {
- OpenAIOkHttpClientAsync.Builder asyncClientBuilder = OpenAIOkHttpClientAsync.builder();
-
- /* Azure-specific code starts here */
- // You can either set 'endpoint' or 'apiKey' directly in the builder.
- // or set same two env vars and use fromEnv() method instead
- asyncClientBuilder
- .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
- .credential(AzureApiKeyCredential.create(System.getenv("AZURE_OPENAI_KEY")));
- /* Azure-specific code ends here */
-
- // All code from this line down is general-purpose OpenAI code
- OpenAIClientAsync client = asyncClientBuilder.build();
-
- ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
- .addMessage(
- ChatCompletionUserMessageParam.builder()
- .content("Who won the world series in 2020?")
- .build())
- .model("gpt-4o")
- .build();
-
- ChatCompletion chatCompletion =
- client.chat().completions().create(params).join();
-
- List<ChatCompletion.Choice> choices = chatCompletion.choices();
- for (ChatCompletion.Choice choice : choices) {
- System.out.println("Choice content: " + choice.message().content().get());
- }
-
- JsonValue filterResult = chatCompletion._additionalProperties().get("prompt_filter_results");
- System.out.println("Content filter results: " + filterResult);
- }
-}
diff --git a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExample.java b/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExample.java
deleted file mode 100644
index a7cc2cfad..000000000
--- a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExample.java
+++ /dev/null
@@ -1,57 +0,0 @@
-package com.openai.azure.examples;
-
-import com.azure.identity.AuthenticationUtil;
-import com.azure.identity.DefaultAzureCredentialBuilder;
-import com.openai.client.OpenAIClient;
-import com.openai.client.okhttp.OpenAIOkHttpClient;
-import com.openai.core.JsonValue;
-import com.openai.credential.BearerTokenCredential;
-import com.openai.models.ChatCompletion;
-import com.openai.models.ChatCompletionCreateParams;
-import com.openai.models.ChatCompletionMessageParam;
-import com.openai.models.ChatCompletionUserMessageParam;
-import java.util.List;
-
-/**
- * This example demonstrates how to use the Azure Entra ID to authenticate with the OpenAI API.
- */
-public final class AzureEntraIDExample {
-
- private AzureEntraIDExample() {}
-
- public static void main(String[] args) {
- OpenAIOkHttpClient.Builder clientBuilder = OpenAIOkHttpClient.builder();
-
- /* Azure-specific code starts here */
- // You can either set 'endpoint' directly in the builder.
- // or set the env var "AZURE_OPENAI_ENDPOINT" and use fromEnv() method instead
- clientBuilder
- .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
- .credential(BearerTokenCredential.create(
- AuthenticationUtil.getBearerTokenSupplier(
- new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")
- ));
- /* Azure-specific code ends here */
-
- // All code from this line down is general-purpose OpenAI code
- OpenAIClient client = clientBuilder.build();
-
- ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
- .addMessage(
- ChatCompletionUserMessageParam.builder()
- .content("Who won the world series in 2020?")
- .build())
- .model("gpt-4o")
- .build();
-
- ChatCompletion chatCompletion = client.chat().completions().create(params);
-
- List<ChatCompletion.Choice> choices = chatCompletion.choices();
- for (ChatCompletion.Choice choice : choices) {
- System.out.println("Choice content: " + choice.message().content().get());
- }
-
- JsonValue filterResult = chatCompletion._additionalProperties().get("prompt_filter_results");
- System.out.println("Content filter results: " + filterResult);
- }
-}
diff --git a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExampleAsync.java b/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExampleAsync.java
deleted file mode 100644
index b557e2337..000000000
--- a/openai-azure-java-example/src/main/java/com.openai.azure.examples/AzureEntraIDExampleAsync.java
+++ /dev/null
@@ -1,58 +0,0 @@
-package com.openai.azure.examples;
-
-import com.azure.identity.AuthenticationUtil;
-import com.azure.identity.DefaultAzureCredentialBuilder;
-import com.openai.client.OpenAIClientAsync;
-import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
-import com.openai.core.JsonValue;
-import com.openai.credential.BearerTokenCredential;
-import com.openai.models.ChatCompletion;
-import com.openai.models.ChatCompletionCreateParams;
-import com.openai.models.ChatCompletionMessageParam;
-import com.openai.models.ChatCompletionUserMessageParam;
-import java.util.List;
-
-/**
- * This example demonstrates how to use the Azure Entra ID to authenticate with the OpenAI API, asynchronously.
- */
-public final class AzureEntraIDExampleAsync {
-
- private AzureEntraIDExampleAsync() {}
-
- public static void main(String[] args) {
- OpenAIOkHttpClientAsync.Builder asyncClientBuilder = OpenAIOkHttpClientAsync.builder();
-
- /* Azure-specific code starts here */
- // You can either set 'endpoint' directly in the builder.
- // or set the env var "AZURE_OPENAI_ENDPOINT" and use fromEnv() method instead
- asyncClientBuilder
- .baseUrl(System.getenv("AZURE_OPENAI_ENDPOINT"))
- .credential(BearerTokenCredential.create(
- AuthenticationUtil.getBearerTokenSupplier(
- new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")
- ));
- /* Azure-specific code ends here */
-
- // All code from this line down is general-purpose OpenAI code
- OpenAIClientAsync asyncClient = asyncClientBuilder.build();
-
- ChatCompletionCreateParams params = ChatCompletionCreateParams.builder()
- .addMessage(
- ChatCompletionUserMessageParam.builder()
- .content("Who won the world series in 2020?")
- .build())
- .model("gpt-4o")
- .build();
-
- ChatCompletion chatCompletion =
- asyncClient.chat().completions().create(params).join();
-
- List<ChatCompletion.Choice> choices = chatCompletion.choices();
- for (ChatCompletion.Choice choice : choices) {
- System.out.println("Choice content: " + choice.message().content().get());
- }
-
- JsonValue filterResult = chatCompletion._additionalProperties().get("prompt_filter_results");
- System.out.println("Content filter results: " + filterResult);
- }
-}
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/BetaAssistantCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/BetaAssistantCreateParams.kt
index 8507230c6..0c44bb42d 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/BetaAssistantCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/BetaAssistantCreateParams.kt
@@ -468,7 +468,7 @@ private constructor(
* all of your available models, or see our
* [Model overview](https://platform.openai.com/docs/models) for descriptions of them.
*/
- fun model(value: String) = apply { model(ChatModel.of(value)) }
+ fun model(value: String) = model(ChatModel.of(value))
/** The description of the assistant. The maximum length is 512 characters. */
fun description(description: String?) = description(JsonField.ofNullable(description))
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateAndRunParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateAndRunParams.kt
index 1ec100a66..9ceadfeaf 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateAndRunParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateAndRunParams.kt
@@ -822,7 +822,7 @@ private constructor(
* associated with the assistant. If not, the model associated with the assistant will
* be used.
*/
- fun model(value: String) = apply { model(ChatModel.of(value)) }
+ fun model(value: String) = model(ChatModel.of(value))
/**
* Whether to enable
@@ -2728,6 +2728,9 @@ private constructor(
fun addTool(codeInterpreter: CodeInterpreterTool) =
addTool(Tool.ofCodeInterpreter(codeInterpreter))
+ /** The tools to add this file to. */
+ fun addToolFileSearch() = addTool(Tool.ofFileSearch())
+
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
putAllAdditionalProperties(additionalProperties)
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateParams.kt
index 84f863538..cf8895e7a 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadCreateParams.kt
@@ -1057,6 +1057,9 @@ private constructor(
fun addTool(codeInterpreter: CodeInterpreterTool) =
addTool(Tool.ofCodeInterpreter(codeInterpreter))
+ /** The tools to add this file to. */
+ fun addToolFileSearch() = addTool(Tool.ofFileSearch())
+
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
putAllAdditionalProperties(additionalProperties)
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadMessageCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadMessageCreateParams.kt
index 931bae997..5670491e4 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadMessageCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadMessageCreateParams.kt
@@ -889,6 +889,9 @@ private constructor(
fun addTool(codeInterpreter: CodeInterpreterTool) =
addTool(Tool.ofCodeInterpreter(codeInterpreter))
+ /** The tools to add this file to. */
+ fun addToolFileSearch() = addTool(Tool.ofFileSearch())
+
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
putAllAdditionalProperties(additionalProperties)
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadRunCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadRunCreateParams.kt
index def84db74..0957f8a76 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadRunCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/BetaThreadRunCreateParams.kt
@@ -910,7 +910,7 @@ private constructor(
* associated with the assistant. If not, the model associated with the assistant will
* be used.
*/
- fun model(value: String) = apply { model(ChatModel.of(value)) }
+ fun model(value: String) = model(ChatModel.of(value))
/**
* Whether to enable
@@ -2634,6 +2634,9 @@ private constructor(
fun addTool(codeInterpreter: CodeInterpreterTool) =
addTool(Tool.ofCodeInterpreter(codeInterpreter))
+ /** The tools to add this file to. */
+ fun addToolFileSearch() = addTool(Tool.ofFileSearch())
+
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
putAllAdditionalProperties(additionalProperties)
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/ChatCompletionCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/ChatCompletionCreateParams.kt
index 17818fcfa..aa49e72b9 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/ChatCompletionCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/ChatCompletionCreateParams.kt
@@ -1431,7 +1431,7 @@ private constructor(
* [model endpoint compatibility](https://platform.openai.com/docs/models#model-endpoint-compatibility)
* table for details on which models work with the Chat API.
*/
- fun model(value: String) = apply { model(ChatModel.of(value)) }
+ fun model(value: String) = model(ChatModel.of(value))
/**
* Parameters for audio output. Required when audio output is requested with
@@ -1527,12 +1527,14 @@ private constructor(
* `none` means the model will not call a function and instead generates a message.
* `auto` means the model can pick between generating a message or calling a function.
*/
+ @Deprecated("deprecated")
fun functionCall(auto: FunctionCall.Auto) = functionCall(FunctionCall.ofAuto(auto))
/**
* Specifying a particular function via `{"name": "my_function"}` forces the model to
* call that function.
*/
+ @Deprecated("deprecated")
fun functionCall(functionCallOption: ChatCompletionFunctionCallOption) =
functionCall(FunctionCall.ofFunctionCallOption(functionCallOption))
@@ -2649,12 +2651,14 @@ private constructor(
* `none` means the model will not call a function and instead generates a message. `auto`
* means the model can pick between generating a message or calling a function.
*/
+ @Deprecated("deprecated")
fun functionCall(auto: FunctionCall.Auto) = apply { body.functionCall(auto) }
/**
* Specifying a particular function via `{"name": "my_function"}` forces the model to call
* that function.
*/
+ @Deprecated("deprecated")
fun functionCall(functionCallOption: ChatCompletionFunctionCallOption) = apply {
body.functionCall(functionCallOption)
}
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/CompletionCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/CompletionCreateParams.kt
index dea43a8cf..f93a9f943 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/CompletionCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/CompletionCreateParams.kt
@@ -807,7 +807,7 @@ private constructor(
* all of your available models, or see our
* [Model overview](https://platform.openai.com/docs/models) for descriptions of them.
*/
- fun model(value: String) = apply { model(Model.of(value)) }
+ fun model(value: String) = model(Model.of(value))
/**
* The prompt(s) to generate completions for, encoded as a string, array of strings,
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/EmbeddingCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/EmbeddingCreateParams.kt
index 84d2c5b2c..e3c682d72 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/EmbeddingCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/EmbeddingCreateParams.kt
@@ -336,7 +336,7 @@ private constructor(
* all of your available models, or see our
* [Model overview](https://platform.openai.com/docs/models) for descriptions of them.
*/
- fun model(value: String) = apply { model(EmbeddingModel.of(value)) }
+ fun model(value: String) = model(EmbeddingModel.of(value))
/**
* The number of dimensions the resulting output embeddings should have. Only supported
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/FineTuningJobCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/FineTuningJobCreateParams.kt
index fd0c1ea53..7c0eb147f 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/FineTuningJobCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/FineTuningJobCreateParams.kt
@@ -450,7 +450,7 @@ private constructor(
* The name of the model to fine-tune. You can select one of the
* [supported models](https://platform.openai.com/docs/guides/fine-tuning#which-models-can-be-fine-tuned).
*/
- fun model(value: String) = apply { model(Model.of(value)) }
+ fun model(value: String) = model(Model.of(value))
/**
* The ID of an uploaded file that contains training data.
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/ImageGenerateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/ImageGenerateParams.kt
index 85b6b5ebe..21ff71b5f 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/ImageGenerateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/ImageGenerateParams.kt
@@ -343,7 +343,7 @@ private constructor(
fun model(model: JsonField<ImageModel>) = apply { this.model = model }
/** The model to use for image generation. */
- fun model(value: String) = apply { model(ImageModel.of(value)) }
+ fun model(value: String) = model(ImageModel.of(value))
/**
* The number of images to generate. Must be between 1 and 10. For `dall-e-3`, only
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/Message.kt b/openai-java-core/src/main/kotlin/com/openai/models/Message.kt
index 2b0ffb2f2..0af5d70a7 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/Message.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/Message.kt
@@ -604,6 +604,10 @@ private constructor(
fun addTool(codeInterpreter: CodeInterpreterTool) =
addTool(Tool.ofCodeInterpreter(codeInterpreter))
+ /** The tools to add this file to. */
+ fun addToolAssistantToolsFileSearchTypeOnly() =
+ addTool(Tool.ofAssistantToolsFileSearchTypeOnly())
+
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
putAllAdditionalProperties(additionalProperties)
diff --git a/openai-java-core/src/main/kotlin/com/openai/models/ModerationCreateParams.kt b/openai-java-core/src/main/kotlin/com/openai/models/ModerationCreateParams.kt
index c4206e739..80b5bfa86 100644
--- a/openai-java-core/src/main/kotlin/com/openai/models/ModerationCreateParams.kt
+++ b/openai-java-core/src/main/kotlin/com/openai/models/ModerationCreateParams.kt
@@ -198,7 +198,7 @@ private constructor(
* [the moderation guide](https://platform.openai.com/docs/guides/moderation), and learn
* about available models [here](https://platform.openai.com/docs/models#moderation).
*/
- fun model(value: String) = apply { model(ModerationModel.of(value)) }
+ fun model(value: String) = model(ModerationModel.of(value))
fun additionalProperties(additionalProperties: Map<String, JsonValue>) = apply {
this.additionalProperties.clear()
diff --git a/openai-java-example/build.gradle.kts b/openai-java-example/build.gradle.kts
index 5c20cba00..da5a872b5 100644
--- a/openai-java-example/build.gradle.kts
+++ b/openai-java-example/build.gradle.kts
@@ -6,13 +6,14 @@ plugins {
dependencies {
implementation(project(":openai-java"))
+ api("com.azure:azure-identity:1.15.0")
}
tasks.withType<JavaCompile>().configureEach {
// Allow using more modern APIs, like `List.of` and `Map.of`, in examples.
- options.release.set(9)
+ options.release.set(11)
}
application {
- mainClass = "com.openai.example.Main"
+ mainClass = "com.openai.example.CompletionsExample"
}
diff --git a/openai-java-example/src/main/java/com/openai/example/AssistantAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/AssistantAsyncExample.java
new file mode 100644
index 000000000..788ae4c82
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/AssistantAsyncExample.java
@@ -0,0 +1,115 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.models.*;
+import java.util.concurrent.CompletableFuture;
+
+public final class AssistantAsyncExample {
+ private AssistantAsyncExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ CompletableFuture<Assistant> assistantFuture = client.beta()
+ .assistants()
+ .create(BetaAssistantCreateParams.builder()
+ .name("Math Tutor")
+ .instructions("You are a personal math tutor. Write and run code to answer math questions.")
+ // TODO: Update this example once we support `addCodeInterpreterTool()` or similar.
+ .addTool(CodeInterpreterTool.builder().build())
+ .model(ChatModel.GPT_4O_MINI)
+ .build());
+ CompletableFuture<String> threadIdFuture = client.beta()
+ .threads()
+ // TODO: Update this example once we support `.create()` without arguments.
+ .create(BetaThreadCreateParams.builder().build())
+ .thenComposeAsync(thread -> client.beta()
+ .threads()
+ .messages()
+ .create(BetaThreadMessageCreateParams.builder()
+ .threadId(thread.id())
+ .role(BetaThreadMessageCreateParams.Role.USER)
+ .content("I need to solve the equation `3x + 11 = 14`. Can you help me?")
+ .build()))
+ .thenApply(Message::threadId);
+
+ CompletableFuture<Run> runFuture = CompletableFuture.allOf(assistantFuture, threadIdFuture)
+ .thenComposeAsync(unused -> client.beta()
+ .threads()
+ .runs()
+ .create(BetaThreadRunCreateParams.builder()
+ .threadId(threadIdFuture.join())
+ .assistantId(assistantFuture.join().id())
+ .instructions("Please address the user as Jane Doe. The user has a premium account.")
+ .build()));
+ CompletableFuture<Run> polledRunFuture = runFuture.thenComposeAsync(run -> pollRun(client, run));
+
+ polledRunFuture
+ .thenComposeAsync(run -> {
+ if (!run.status().equals(RunStatus.COMPLETED)) {
+ return CompletableFuture.completedFuture(null);
+ }
+
+ return listThreadMessages(client, run.threadId())
+ .thenComposeAsync(unused -> client.beta()
+ .assistants()
+ .delete(BetaAssistantDeleteParams.builder()
+ .assistantId(assistantFuture.join().id())
+ .build()))
+ .thenAccept(assistantDeleted ->
+ System.out.println("Assistant deleted: " + assistantDeleted.deleted()));
+ })
+ .join();
+ }
+
+ private static CompletableFuture<Run> pollRun(OpenAIClientAsync client, Run run) {
+ if (!run.status().equals(RunStatus.QUEUED) && !run.status().equals(RunStatus.IN_PROGRESS)) {
+ System.out.println("Run completed with status: " + run.status() + "\n");
+ return CompletableFuture.completedFuture(run);
+ }
+
+ System.out.println("Polling run...");
+ try {
+ java.lang.Thread.sleep(500);
+ } catch (InterruptedException e) {
+ throw new RuntimeException(e);
+ }
+
+ return client.beta()
+ .threads()
+ .runs()
+ .retrieve(BetaThreadRunRetrieveParams.builder()
+ .threadId(run.threadId())
+ .runId(run.id())
+ .build())
+ .thenComposeAsync(newRun -> pollRun(client, newRun));
+ }
+
+ private static CompletableFuture<Void> listThreadMessages(OpenAIClientAsync client, String threadId) {
+ CompletableFuture<BetaThreadMessageListPageAsync> pageFuture = client.beta()
+ .threads()
+ .messages()
+ .list(BetaThreadMessageListParams.builder()
+ .threadId(threadId)
+ .order(BetaThreadMessageListParams.Order.ASC)
+ .build());
+ return pageFuture.thenComposeAsync(page -> page.autoPager()
+ .forEach(
+ currentMessage -> {
+ System.out.println(currentMessage.role().toString().toUpperCase());
+ currentMessage.content().stream()
+ .flatMap(content -> content.text().stream())
+ .forEach(textBlock ->
+ System.out.println(textBlock.text().value()));
+ System.out.println();
+
+ // Keep iterating
+ return true;
+ },
+ pageFuture.defaultExecutor()));
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/AssistantExample.java b/openai-java-example/src/main/java/com/openai/example/AssistantExample.java
new file mode 100644
index 000000000..379d1ab33
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/AssistantExample.java
@@ -0,0 +1,85 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.*;
+import com.openai.models.Thread;
+
+public final class AssistantExample {
+ private AssistantExample() {}
+
+ public static void main(String[] args) throws InterruptedException {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ Assistant assistant = client.beta()
+ .assistants()
+ .create(BetaAssistantCreateParams.builder()
+ .name("Math Tutor")
+ .instructions("You are a personal math tutor. Write and run code to answer math questions.")
+ // TODO: Update this example once we support `addCodeInterpreterTool()` or similar.
+ .addTool(CodeInterpreterTool.builder().build())
+ .model(ChatModel.GPT_4O_MINI)
+ .build());
+ Thread thread =
+ // TODO: Update this example once we support `.create()` without arguments.
+ client.beta().threads().create(BetaThreadCreateParams.builder().build());
+ client.beta()
+ .threads()
+ .messages()
+ .create(BetaThreadMessageCreateParams.builder()
+ .threadId(thread.id())
+ .role(BetaThreadMessageCreateParams.Role.USER)
+ .content("I need to solve the equation `3x + 11 = 14`. Can you help me?")
+ .build());
+
+ Run run = client.beta()
+ .threads()
+ .runs()
+ .create(BetaThreadRunCreateParams.builder()
+ .threadId(thread.id())
+ .assistantId(assistant.id())
+ .instructions("Please address the user as Jane Doe. The user has a premium account.")
+ .build());
+ while (run.status().equals(RunStatus.QUEUED) || run.status().equals(RunStatus.IN_PROGRESS)) {
+ System.out.println("Polling run...");
+ java.lang.Thread.sleep(500);
+ run = client.beta()
+ .threads()
+ .runs()
+ .retrieve(BetaThreadRunRetrieveParams.builder()
+ .threadId(thread.id())
+ .runId(run.id())
+ .build());
+ }
+ System.out.println("Run completed with status: " + run.status() + "\n");
+
+ if (!run.status().equals(RunStatus.COMPLETED)) {
+ return;
+ }
+
+ BetaThreadMessageListPage page = client.beta()
+ .threads()
+ .messages()
+ .list(BetaThreadMessageListParams.builder()
+ .threadId(thread.id())
+ .order(BetaThreadMessageListParams.Order.ASC)
+ .build());
+ page.autoPager().stream().forEach(currentMessage -> {
+ System.out.println(currentMessage.role().toString().toUpperCase());
+ currentMessage.content().stream()
+ .flatMap(content -> content.text().stream())
+ .forEach(textBlock -> System.out.println(textBlock.text().value()));
+ System.out.println();
+ });
+
+ AssistantDeleted assistantDeleted = client.beta()
+ .assistants()
+ .delete(BetaAssistantDeleteParams.builder()
+ .assistantId(assistant.id())
+ .build());
+ System.out.println("Assistant deleted: " + assistantDeleted.deleted());
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/AzureEntraIdExample.java b/openai-java-example/src/main/java/com/openai/example/AzureEntraIdExample.java
new file mode 100644
index 000000000..ccae08110
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/AzureEntraIdExample.java
@@ -0,0 +1,40 @@
+package com.openai.example;
+
+import com.azure.identity.AuthenticationUtil;
+import com.azure.identity.DefaultAzureCredentialBuilder;
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.credential.BearerTokenCredential;
+import com.openai.models.ChatCompletionCreateParams;
+import com.openai.models.ChatCompletionDeveloperMessageParam;
+import com.openai.models.ChatCompletionUserMessageParam;
+import com.openai.models.ChatModel;
+
+public final class AzureEntraIdExample {
+ private AzureEntraIdExample() {}
+
+ public static void main(String[] args) {
+ OpenAIClient client = OpenAIOkHttpClient.builder()
+ // Configures the client from the `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ .fromEnv()
+ // Set the Azure Entra ID credential
+ .credential(BearerTokenCredential.create(AuthenticationUtil.getBearerTokenSupplier(
+ new DefaultAzureCredentialBuilder().build(), "https://cognitiveservices.azure.com/.default")))
+ .build();
+
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build())
+ .build();
+
+ client.chat().completions().create(createParams).choices().stream()
+ .flatMap(choice -> choice.message().content().stream())
+ .forEach(System.out::println);
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsAsyncExample.java
new file mode 100644
index 000000000..3913ae9cf
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsAsyncExample.java
@@ -0,0 +1,35 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.models.*;
+
+public final class CompletionsAsyncExample {
+ private CompletionsAsyncExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build())
+ .build();
+
+ client.chat()
+ .completions()
+ .create(createParams)
+ .thenAccept(completion -> completion.choices().stream()
+ .flatMap(choice -> choice.message().content().stream())
+ .forEach(System.out::println))
+ .join();
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsConversationAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsConversationAsyncExample.java
new file mode 100644
index 000000000..2512ca4c6
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsConversationAsyncExample.java
@@ -0,0 +1,61 @@
+package com.openai.example;
+
+import static java.util.stream.Collectors.toList;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.models.*;
+import java.util.List;
+import java.util.concurrent.CompletableFuture;
+
+public final class CompletionsConversationAsyncExample {
+ private CompletionsConversationAsyncExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ // Use a builder so that we can append more messages to it below.
+ // Each time we call `.build()` we get an immutable object that's unaffected by future mutations of the builder.
+ ChatCompletionCreateParams.Builder createParamsBuilder = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build());
+
+ CompletableFuture<Void> future = CompletableFuture.completedFuture(null);
+ for (int i = 0; i < 4; i++) {
+ final int index = i;
+ future = future.thenComposeAsync(
+ unused -> client.chat().completions().create(createParamsBuilder.build()))
+ .thenAccept(completion -> {
+ List<ChatCompletionMessage> messages = completion.choices().stream()
+ .map(ChatCompletion.Choice::message)
+ .collect(toList());
+
+ messages.stream()
+ .flatMap(message -> message.content().stream())
+ .forEach(System.out::println);
+
+ System.out.println("\n-----------------------------------\n");
+
+ messages.forEach(createParamsBuilder::addMessage);
+ createParamsBuilder
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Be as snarky as possible when replying!" + "!".repeat(index))
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("But why?" + "?".repeat(index))
+ .build());
+ });
+ }
+
+ future.join();
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsConversationExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsConversationExample.java
new file mode 100644
index 000000000..c337d9c54
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsConversationExample.java
@@ -0,0 +1,51 @@
+package com.openai.example;
+
+import static java.util.stream.Collectors.toList;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.*;
+import java.util.List;
+
+public final class CompletionsConversationExample {
+ private CompletionsConversationExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ // Use a builder so that we can append more messages to it below.
+ // Each time we call `.build()` we get an immutable object that's unaffected by future mutations of the builder.
+ ChatCompletionCreateParams.Builder createParamsBuilder = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build());
+
+ for (int i = 0; i < 4; i++) {
+ List<ChatCompletionMessage> messages =
+ client.chat().completions().create(createParamsBuilder.build()).choices().stream()
+ .map(ChatCompletion.Choice::message)
+ .collect(toList());
+
+ messages.stream().flatMap(message -> message.content().stream()).forEach(System.out::println);
+
+ System.out.println("\n-----------------------------------\n");
+
+ messages.forEach(createParamsBuilder::addMessage);
+ createParamsBuilder
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Be as snarky as possible when replying!" + "!".repeat(i))
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("But why?" + "?".repeat(i))
+ .build());
+ }
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsExample.java
new file mode 100644
index 000000000..c6bdd270e
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsExample.java
@@ -0,0 +1,34 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.ChatCompletionCreateParams;
+import com.openai.models.ChatCompletionDeveloperMessageParam;
+import com.openai.models.ChatCompletionUserMessageParam;
+import com.openai.models.ChatModel;
+
+public final class CompletionsExample {
+ private CompletionsExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build())
+ .build();
+
+ client.chat().completions().create(createParams).choices().stream()
+ .flatMap(choice -> choice.message().content().stream())
+ .forEach(System.out::println);
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingAsyncExample.java
new file mode 100644
index 000000000..c42635065
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingAsyncExample.java
@@ -0,0 +1,49 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.core.http.AsyncStreamResponse;
+import com.openai.models.*;
+import java.util.Optional;
+import java.util.concurrent.CompletableFuture;
+
+public final class CompletionsStreamingAsyncExample {
+ private CompletionsStreamingAsyncExample() {}
+
+ public static void main(String[] args) throws Exception {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build())
+ .build();
+
+ CompletableFuture<Void> onCompleteFuture = new CompletableFuture<>();
+
+ // TODO: Update this example once we expose an `onCompleteFuture()` method.
+ client.chat().completions().createStreaming(createParams).subscribe(new AsyncStreamResponse.Handler<>() {
+ @Override
+ public void onNext(ChatCompletionChunk completion) {
+ completion.choices().stream()
+ .flatMap(choice -> choice.delta().content().stream())
+ .forEach(System.out::print);
+ }
+
+ @Override
+ public void onComplete(Optional<Throwable> error) {
+ onCompleteFuture.complete(null);
+ }
+ });
+
+ onCompleteFuture.join();
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingExample.java b/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingExample.java
new file mode 100644
index 000000000..bd54654ea
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/CompletionsStreamingExample.java
@@ -0,0 +1,36 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.core.http.StreamResponse;
+import com.openai.models.*;
+
+public final class CompletionsStreamingExample {
+ private CompletionsStreamingExample() {}
+
+ public static void main(String[] args) throws Exception {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_3_5_TURBO)
+ .maxCompletionTokens(2048)
+ .addMessage(ChatCompletionDeveloperMessageParam.builder()
+ .content("Make sure you mention Stainless!")
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Tell me a story about building the best SDK!")
+ .build())
+ .build();
+
+ try (StreamResponse<ChatCompletionChunk> streamResponse =
+ client.chat().completions().createStreaming(createParams)) {
+ streamResponse.stream()
+ .flatMap(completion -> completion.choices().stream())
+ .flatMap(choice -> choice.delta().content().stream())
+ .forEach(System.out::print);
+ }
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/Main.java b/openai-java-example/src/main/java/com/openai/example/Main.java
deleted file mode 100644
index 8daeb5598..000000000
--- a/openai-java-example/src/main/java/com/openai/example/Main.java
+++ /dev/null
@@ -1,39 +0,0 @@
-package com.openai.example;
-
-import com.openai.client.OpenAIClient;
-import com.openai.client.okhttp.OpenAIOkHttpClient;
-import com.openai.core.http.StreamResponse;
-import com.openai.models.*;
-
-public final class Main {
- private Main() {}
-
- public static void main(String[] args) {
- OpenAIClient client = OpenAIOkHttpClient.fromEnv();
- ChatCompletionCreateParams completionCreateParams = ChatCompletionCreateParams.builder()
- .model(ChatModel.GPT_3_5_TURBO)
- .maxCompletionTokens(1024)
- .addMessage(ChatCompletionUserMessageParam.builder()
- .content("Tell me a story about building the best SDK!")
- .build())
- .build();
-
- // Non-streaming example
- client.chat().completions().create(completionCreateParams).choices().stream()
- .flatMap(choice -> choice.message().content().stream())
- .forEach(System.out::println);
-
- System.out.println("\n-----------------------------------\n");
-
- // Streaming example
- try (StreamResponse<ChatCompletionChunk> messageStreamResponse =
- client.chat().completions().createStreaming(completionCreateParams)) {
- messageStreamResponse.stream()
- .flatMap(completion -> completion.choices().stream())
- .flatMap(choice -> choice.delta().content().stream())
- .forEach(System.out::print);
- } catch (Exception e) {
- System.out.println(e.getMessage());
- }
- }
-}
diff --git a/openai-java-example/src/main/java/com/openai/example/ModelListAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/ModelListAsyncExample.java
new file mode 100644
index 000000000..9df7af64f
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/ModelListAsyncExample.java
@@ -0,0 +1,32 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.models.ModelListPageAsync;
+import com.openai.models.ModelListParams;
+import java.util.concurrent.CompletableFuture;
+
+public final class ModelListAsyncExample {
+ private ModelListAsyncExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ CompletableFuture<ModelListPageAsync> pageFuture =
+ // TODO: Update this example once we support `.list()` without arguments.
+ client.models().list(ModelListParams.builder().build());
+ pageFuture
+ .thenComposeAsync(page -> page.autoPager()
+ .forEach(
+ model -> {
+ System.out.println(model.id());
+ // Keep iterating
+ return true;
+ },
+ pageFuture.defaultExecutor()))
+ .join();
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/ModelListExample.java b/openai-java-example/src/main/java/com/openai/example/ModelListExample.java
new file mode 100644
index 000000000..93bb7c19f
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/ModelListExample.java
@@ -0,0 +1,22 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.models.*;
+
+public final class ModelListExample {
+ private ModelListExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ client.models()
+ // TODO: Update this example once we support `.list()` without arguments.
+ .list(ModelListParams.builder().build())
+ .autoPager()
+ .forEach(model -> System.out.println(model.id()));
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/StructuredOutputsAsyncExample.java b/openai-java-example/src/main/java/com/openai/example/StructuredOutputsAsyncExample.java
new file mode 100644
index 000000000..37ba2dd65
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/StructuredOutputsAsyncExample.java
@@ -0,0 +1,50 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClientAsync;
+import com.openai.client.okhttp.OpenAIOkHttpClientAsync;
+import com.openai.core.JsonValue;
+import com.openai.models.ChatCompletionCreateParams;
+import com.openai.models.ChatCompletionUserMessageParam;
+import com.openai.models.ChatModel;
+import com.openai.models.ResponseFormatJsonSchema;
+import com.openai.models.ResponseFormatJsonSchema.JsonSchema;
+import java.util.Map;
+
+public final class StructuredOutputsAsyncExample {
+ private StructuredOutputsAsyncExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClientAsync client = OpenAIOkHttpClientAsync.fromEnv();
+
+ // TODO: Update this once we support extracting JSON schemas from Java classes
+ JsonSchema.Schema schema = JsonSchema.Schema.builder()
+ .putAdditionalProperty("type", JsonValue.from("object"))
+ .putAdditionalProperty(
+ "properties", JsonValue.from(Map.of("employees", Map.of("items", Map.of("type", "string")))))
+ .build();
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_4O_MINI)
+ .maxCompletionTokens(2048)
+ .responseFormat(ResponseFormatJsonSchema.builder()
+ .jsonSchema(JsonSchema.builder()
+ .name("employee-list")
+ .schema(schema)
+ .build())
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Who works at OpenAI?")
+ .build())
+ .build();
+
+ client.chat()
+ .completions()
+ .create(createParams)
+ .thenAccept(completion -> completion.choices().stream()
+ .flatMap(choice -> choice.message().content().stream())
+ .forEach(System.out::println))
+ .join();
+ }
+}
diff --git a/openai-java-example/src/main/java/com/openai/example/StructuredOutputsExample.java b/openai-java-example/src/main/java/com/openai/example/StructuredOutputsExample.java
new file mode 100644
index 000000000..e3faf8437
--- /dev/null
+++ b/openai-java-example/src/main/java/com/openai/example/StructuredOutputsExample.java
@@ -0,0 +1,43 @@
+package com.openai.example;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import com.openai.core.JsonValue;
+import com.openai.models.*;
+import com.openai.models.ResponseFormatJsonSchema.JsonSchema;
+import java.util.Map;
+
+public final class StructuredOutputsExample {
+ private StructuredOutputsExample() {}
+
+ public static void main(String[] args) {
+ // Configures using one of:
+ // - The `OPENAI_API_KEY` environment variable
+ // - The `AZURE_OPENAI_ENDPOINT` and `AZURE_OPENAI_KEY` environment variables
+ OpenAIClient client = OpenAIOkHttpClient.fromEnv();
+
+ // TODO: Update this once we support extracting JSON schemas from Java classes
+ JsonSchema.Schema schema = JsonSchema.Schema.builder()
+ .putAdditionalProperty("type", JsonValue.from("object"))
+ .putAdditionalProperty(
+ "properties", JsonValue.from(Map.of("employees", Map.of("items", Map.of("type", "string")))))
+ .build();
+ ChatCompletionCreateParams createParams = ChatCompletionCreateParams.builder()
+ .model(ChatModel.GPT_4O_MINI)
+ .maxCompletionTokens(2048)
+ .responseFormat(ResponseFormatJsonSchema.builder()
+ .jsonSchema(JsonSchema.builder()
+ .name("employee-list")
+ .schema(schema)
+ .build())
+ .build())
+ .addMessage(ChatCompletionUserMessageParam.builder()
+ .content("Who works at OpenAI?")
+ .build())
+ .build();
+
+ client.chat().completions().create(createParams).choices().stream()
+ .flatMap(choice -> choice.message().content().stream())
+ .forEach(System.out::println);
+ }
+}
diff --git a/settings.gradle.kts b/settings.gradle.kts
index 58e9de020..3c5725fb3 100644
--- a/settings.gradle.kts
+++ b/settings.gradle.kts
@@ -4,4 +4,3 @@ include("openai-java")
include("openai-java-client-okhttp")
include("openai-java-core")
include("openai-java-example")
-include("openai-azure-java-example")