diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
index a1d7a359adc..e62c809f77e 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/openai-chat.adoc
@@ -514,7 +514,9 @@ Next, create an `OpenAiChatModel` and use it for text generations:
 
 [source,java]
 ----
-var openAiApi = new OpenAiApi(System.getenv("OPENAI_API_KEY"));
+var openAiApi = OpenAiApi.builder()
+            .apiKey(System.getenv("OPENAI_API_KEY"))
+            .build();
 var openAiChatOptions = OpenAiChatOptions.builder()
             .model("gpt-3.5-turbo")
             .temperature(0.4)
@@ -531,7 +533,7 @@ Flux<ChatResponse> response = this.chatModel.stream(
 ----
 
 The `OpenAiChatOptions` provides the configuration information for the chat requests.
-The `OpenAiChatOptions.Builder` is a fluent options-builder.
+The `OpenAiApi.Builder` and `OpenAiChatOptions.Builder` are fluent builders for the API client and the chat options, respectively.
 
 == Low-level OpenAiApi Client [[low-level-api]]
 
@@ -545,8 +547,9 @@ Here is a simple snippet showing how to use the API programmatically:
 
 [source,java]
 ----
-OpenAiApi openAiApi =
-    new OpenAiApi(System.getenv("OPENAI_API_KEY"));
+OpenAiApi openAiApi = OpenAiApi.builder()
+    .apiKey(System.getenv("OPENAI_API_KEY"))
+    .build();
 
 ChatCompletionMessage chatCompletionMessage =
     new ChatCompletionMessage("Hello world", Role.USER);
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/embeddings/openai-embeddings.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/embeddings/openai-embeddings.adoc
index bcb174cab94..23c9626fc1f 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/embeddings/openai-embeddings.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/embeddings/openai-embeddings.adoc
@@ -212,7 +212,9 @@ Next, create an `OpenAiEmbeddingModel` instance and use it to compute the simila
 
 [source,java]
 ----
-var openAiApi = new OpenAiApi(System.getenv("OPENAI_API_KEY"));
+var openAiApi = OpenAiApi.builder()
+            .apiKey(System.getenv("OPENAI_API_KEY"))
+            .build();
 
 var embeddingModel = new OpenAiEmbeddingModel(
         this.openAiApi,
@@ -228,6 +230,6 @@ EmbeddingResponse embeddingResponse = this.embeddingModel
 ----
 
 The `OpenAiEmbeddingOptions` provides the configuration information for the embedding requests.
-The options class offers a `builder()` for easy options creation.
+The API and options classes offer a `builder()` for easy options creation.
 
 
diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/usage-handling.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/usage-handling.adoc
index 3ec4c35a0b9..3a9daf396d6 100644
--- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/usage-handling.adoc
+++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/usage-handling.adoc
@@ -13,9 +13,9 @@ Object getNativeUsage();
 ```
 
 This method allows access to the model-specific native usage data, enabling more detailed usage tracking when needed.
 
-=== Using with ChatClient
+=== Using with ChatModel
 
-Here's a complete example showing how to track usage with OpenAI's ChatClient:
+Here's a complete example showing how to track usage with OpenAI's ChatModel:
 
 ```java
@@ -23,12 +23,16 @@ public class Configuration {
 
     @Bean
     public OpenAiApi chatCompletionApi() {
-        return new OpenAiApi(System.getenv("OPENAI_API_KEY"));
+        return OpenAiApi.builder()
+            .apiKey(System.getenv("OPENAI_API_KEY"))
+            .build();
     }
 
     @Bean
     public OpenAiChatModel openAiClient(OpenAiApi openAiApi) {
-        return new OpenAiChatModel(openAiApi);
+        return OpenAiChatModel.builder()
+            .openAiApi(openAiApi)
+            .build();
     }
 
 }
@@ -46,10 +50,8 @@ public class ChatService {
         // Create a chat prompt
         Prompt prompt = new Prompt("What is the weather like today?");
 
-        ChatClient chatClient = ChatClient.builder(this.chatModel).build();
-
         // Get the chat response
-        ChatResponse response = chatClient.call(prompt);
+        ChatResponse response = this.chatModel.call(prompt);
 
         // Access the usage information
         Usage usage = response.getMetadata().getUsage();
@@ -80,6 +82,26 @@ public class ChatService {
 }
 ```
 
+=== Using with ChatClient
+
+If you are using the `ChatClient`, you can access the usage information through the `ChatResponse` object:
+
+```java
+// Create a chat prompt
+Prompt prompt = new Prompt("What is the weather like today?");
+
+// Create a chat client
+ChatClient chatClient = ChatClient.create(chatModel);
+
+// Get the chat response
+ChatResponse response = chatClient.prompt(prompt)
+    .call()
+    .chatResponse();
+
+// Access the usage information
+Usage usage = response.getMetadata().getUsage();
+```
+
 == Benefits
 
 **Standardization**: Provides a consistent way to handle usage across different AI models
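
Taken together, these hunks move the documented examples from the removed `new OpenAiApi(...)` and `new OpenAiChatModel(...)` constructors to builder-based construction, and they show usage tracking through both `ChatModel` and `ChatClient`. The sketch below is a quick end-to-end sanity check of that wiring, not part of the diff itself: the import paths, the `main` scaffolding, the class name, and the `getPromptTokens()`/`getTotalTokens()` accessors are assumptions based on the Spring AI OpenAI module, while the builder and `ChatClient` calls mirror the snippets added above.

```java
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.metadata.Usage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.api.OpenAiApi;

public class BuilderUsageSanityCheck {

    public static void main(String[] args) {
        // Low-level API client built the new way (replaces new OpenAiApi(apiKey))
        OpenAiApi openAiApi = OpenAiApi.builder()
                .apiKey(System.getenv("OPENAI_API_KEY"))
                .build();

        // Chat model built on top of the API client (replaces new OpenAiChatModel(openAiApi))
        OpenAiChatModel chatModel = OpenAiChatModel.builder()
                .openAiApi(openAiApi)
                .build();

        // Fluent ChatClient call, as in the new "Using with ChatClient" section
        ChatClient chatClient = ChatClient.create(chatModel);
        ChatResponse response = chatClient.prompt(new Prompt("What is the weather like today?"))
                .call()
                .chatResponse();

        // Token usage comes from the response metadata
        Usage usage = response.getMetadata().getUsage();
        System.out.println("Prompt tokens: " + usage.getPromptTokens());
        System.out.println("Total tokens: " + usage.getTotalTokens());
    }
}
```

If the `@Bean` style from `usage-handling.adoc` is kept, the same two builders can simply back the `chatCompletionApi()` and `openAiClient(...)` bean definitions instead of living in `main`.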