diff --git a/advisors/spring-ai-advisors-vector-store/src/main/java/org/springframework/ai/chat/client/advisor/vectorstore/VectorStoreChatMemoryAdvisor.java b/advisors/spring-ai-advisors-vector-store/src/main/java/org/springframework/ai/chat/client/advisor/vectorstore/VectorStoreChatMemoryAdvisor.java
index c09b9719ff0..76c7ad36883 100644
--- a/advisors/spring-ai-advisors-vector-store/src/main/java/org/springframework/ai/chat/client/advisor/vectorstore/VectorStoreChatMemoryAdvisor.java
+++ b/advisors/spring-ai-advisors-vector-store/src/main/java/org/springframework/ai/chat/client/advisor/vectorstore/VectorStoreChatMemoryAdvisor.java
@@ -140,7 +140,7 @@ private AdvisedRequest before(AdvisedRequest request) {
             .systemParams(advisedSystemParams)
             .build();
-        UserMessage userMessage = new UserMessage(request.userText(), request.media());
+        UserMessage userMessage = UserMessage.builder().text(request.userText()).media(request.media()).build();
         this.getChatMemoryStore()
             .write(toDocuments(List.of(userMessage), this.doGetConversationId(request.adviseContext())));
diff --git a/auto-configurations/models/chat/memory/spring-ai-autoconfigure-model-chat-memory-neo4j/src/test/java/org/springframework/ai/model/chat/memory/neo4j/autoconfigure/Neo4jChatMemoryAutoConfigurationIT.java b/auto-configurations/models/chat/memory/spring-ai-autoconfigure-model-chat-memory-neo4j/src/test/java/org/springframework/ai/model/chat/memory/neo4j/autoconfigure/Neo4jChatMemoryAutoConfigurationIT.java
index 95d7e8a2306..1a9705442bd 100644
--- a/auto-configurations/models/chat/memory/spring-ai-autoconfigure-model-chat-memory-neo4j/src/test/java/org/springframework/ai/model/chat/memory/neo4j/autoconfigure/Neo4jChatMemoryAutoConfigurationIT.java
+++ b/auto-configurations/models/chat/memory/spring-ai-autoconfigure-model-chat-memory-neo4j/src/test/java/org/springframework/ai/model/chat/memory/neo4j/autoconfigure/Neo4jChatMemoryAutoConfigurationIT.java
@@ -101,7 +101,7 @@ void addAndGet() {
                 .data("hello".getBytes(StandardCharsets.UTF_8))
                 .build(),
             Media.builder().data(URI.create("http://www.google.com").toURL()).mimeType(textPlain).build());
-        UserMessage userMessageWithMedia = new UserMessage("Message with media", media);
+        UserMessage userMessageWithMedia = UserMessage.builder().text("Message with media").media(media).build();
         memory.add(sessionId, userMessageWithMedia);
         messages = memory.get(sessionId, Integer.MAX_VALUE);
diff --git a/memory/spring-ai-model-chat-memory-neo4j/src/main/java/org/springframework/ai/chat/memory/neo4j/Neo4jChatMemory.java b/memory/spring-ai-model-chat-memory-neo4j/src/main/java/org/springframework/ai/chat/memory/neo4j/Neo4jChatMemory.java
index 0acae6f0734..4a9fd7250fa 100644
--- a/memory/spring-ai-model-chat-memory-neo4j/src/main/java/org/springframework/ai/chat/memory/neo4j/Neo4jChatMemory.java
+++ b/memory/spring-ai-model-chat-memory-neo4j/src/main/java/org/springframework/ai/chat/memory/neo4j/Neo4jChatMemory.java
@@ -265,8 +265,11 @@ private Message buildUserMessage(org.neo4j.driver.Record record, Map<String, Object> messageMap, List<Media> mediaList) {
         Message message;
         Map<String, Object> metadata = record.get("metadata").asMap();
-        message = new UserMessage(messageMap.get(MessageAttributes.TEXT_CONTENT.getValue()).toString(), mediaList,
-                metadata);
+        message = UserMessage.builder()
+            .text(messageMap.get(MessageAttributes.TEXT_CONTENT.getValue()).toString())
+            .media(mediaList)
+            .metadata(metadata)
+            .build();
         return message;
     }
diff --git a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java
b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java index e404187f636..5c8edc1e4b5 100644 --- a/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java +++ b/models/spring-ai-anthropic/src/test/java/org/springframework/ai/anthropic/AnthropicChatModelIT.java @@ -236,8 +236,10 @@ void multiModalityTest() throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage))); @@ -251,9 +253,10 @@ void multiModalityPdfTest() throws IOException { var pdfData = new ClassPathResource("/spring-ai-reference-overview.pdf"); - var userMessage = new UserMessage( - "You are a very professional document summarization specialist. Please summarize the given document.", - List.of(new Media(new MimeType("application", "pdf"), pdfData))); + var userMessage = UserMessage.builder() + .text("You are a very professional document summarization specialist. Please summarize the given document.") + .media(List.of(new Media(new MimeType("application", "pdf"), pdfData))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage), ToolCallingChatOptions.builder().model(AnthropicApi.ChatModel.CLAUDE_3_5_SONNET.getName()).build())); diff --git a/models/spring-ai-bedrock-converse/src/test/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModelIT.java b/models/spring-ai-bedrock-converse/src/test/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModelIT.java index 86ef360e22d..0f08633a426 100644 --- a/models/spring-ai-bedrock-converse/src/test/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModelIT.java +++ b/models/spring-ai-bedrock-converse/src/test/java/org/springframework/ai/bedrock/converse/BedrockProxyChatModelIT.java @@ -234,8 +234,10 @@ void multiModalityTest() throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage))); diff --git a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelIT.java b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelIT.java index 0f493bec72b..bbf4d608d57 100644 --- a/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelIT.java +++ b/models/spring-ai-mistral-ai/src/test/java/org/springframework/ai/mistralai/MistralAiChatModelIT.java @@ -265,8 +265,10 @@ void streamFunctionCallTest() { void multiModalityEmbeddedImage(String modelName) { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new 
Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel .call(new Prompt(List.of(userMessage), ChatOptions.builder().model(modelName).build())); @@ -279,11 +281,13 @@ void multiModalityEmbeddedImage(String modelName) { @ParameterizedTest(name = "{0} : {displayName} ") @ValueSource(strings = { "pixtral-large-latest" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), ChatOptions.builder().model(modelName).build())); @@ -295,11 +299,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @Test void streamingMultiModalityImageUrl() throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.streamingChatModel.stream(new Prompt(List.of(userMessage), ChatOptions.builder().model(MistralAiApi.ChatModel.PIXTRAL_LARGE.getValue()).build())); diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java index d09c67954fd..28064bb7732 100644 --- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java +++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelMultimodalIT.java @@ -51,8 +51,10 @@ class OllamaChatModelMultimodalIT extends BaseOllamaIT { void unsupportedMediaType() { var imageData = new ClassPathResource("/norway.webp"); - var userMessage = new UserMessage("Explain what do you see in this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see in this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); assertThatThrownBy(() -> this.chatModel.call(new Prompt(List.of(userMessage)))) .isInstanceOf(RuntimeException.class); @@ -62,8 +64,10 @@ void unsupportedMediaType() { void multiModalityTest() { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see in this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see in this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage))); diff --git 
a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/MessageTypeContentTests.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/MessageTypeContentTests.java index 03fa776748d..5a0968a4962 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/MessageTypeContentTests.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/MessageTypeContentTests.java @@ -17,6 +17,7 @@ package org.springframework.ai.openai.chat; import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; import java.util.List; import java.util.Map; @@ -127,8 +128,10 @@ public void userMessageWithMediaType() throws MalformedURLException { .willReturn(Mockito.mock(ResponseEntity.class)); URL mediaUrl = new URL("http://test"); - this.chatModel.call(new Prompt(List.of(new UserMessage("test message", - List.of(Media.builder().mimeType(MimeTypeUtils.IMAGE_JPEG).data(mediaUrl).build()))))); + this.chatModel.call(new Prompt(List.of(UserMessage.builder() + .text("test message") + .media(List.of(Media.builder().mimeType(MimeTypeUtils.IMAGE_JPEG).data(mediaUrl).build())) + .build()))); validateComplexContent(this.pomptCaptor.getValue()); } @@ -139,10 +142,11 @@ public void streamUserMessageWithMediaType() throws MalformedURLException { given(this.openAiApi.chatCompletionStream(this.pomptCaptor.capture(), this.headersCaptor.capture())) .willReturn(this.fluxResponse); - URL mediaUrl = new URL("http://test"); - this.chatModel.stream(new Prompt(List.of(new UserMessage("test message", - List.of(Media.builder().mimeType(MimeTypeUtils.IMAGE_JPEG).data(mediaUrl).build()))))) - .subscribe(); + URI mediaUrl = URI.create("http://test"); + this.chatModel.stream(new Prompt(List.of(UserMessage.builder() + .text("test message") + .media(List.of(Media.builder().mimeType(MimeTypeUtils.IMAGE_JPEG).data(mediaUrl).build())) + .build()))).subscribe(); validateComplexContent(this.pomptCaptor.getValue()); } diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelIT.java index 241bb8849db..5910d31ace7 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/OpenAiChatModelIT.java @@ -457,8 +457,10 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -472,11 +474,13 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { @ValueSource(strings = { "gpt-4o" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on 
this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -489,11 +493,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @Test void streamingMultiModalityImageUrl() throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.streamingChatModel.stream(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(OpenAiApi.ChatModel.GPT_4_O.getValue()).build())); @@ -552,8 +558,10 @@ void streamingMultiModalityOutputAudio(String modelName) throws IOException { @ValueSource(strings = { "gpt-4o-audio-preview" }) void multiModalityInputAudio(String modelName) { var audioResource = new ClassPathResource("speech1.mp3"); - var userMessage = new UserMessage("What is this recording about?", - List.of(new Media(MimeTypeUtils.parseMimeType("audio/mp3"), audioResource))); + var userMessage = UserMessage.builder() + .text("What is this recording about?") + .media(List.of(new Media(MimeTypeUtils.parseMimeType("audio/mp3"), audioResource))) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), ChatOptions.builder().model(modelName).build())); @@ -567,8 +575,10 @@ void multiModalityInputAudio(String modelName) { @ValueSource(strings = { "gpt-4o-audio-preview" }) void streamingMultiModalityInputAudio(String modelName) { var audioResource = new ClassPathResource("speech1.mp3"); - var userMessage = new UserMessage("What is this recording about?", - List.of(new Media(MimeTypeUtils.parseMimeType("audio/mp3"), audioResource))); + var userMessage = UserMessage.builder() + .text("What is this recording about?") + .media(List.of(new Media(MimeTypeUtils.parseMimeType("audio/mp3"), audioResource))) + .build(); Flux response = this.chatModel .stream(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/GroqWithOpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/GroqWithOpenAiChatModelIT.java index 1bb39e97a0e..e759c4cc17d 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/GroqWithOpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/GroqWithOpenAiChatModelIT.java @@ -299,8 +299,10 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var 
response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -315,11 +317,13 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { @ValueSource(strings = { "llama3-70b-8192" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -333,11 +337,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @Test void streamingMultiModalityImageUrl() throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.chatModel.stream(new Prompt(List.of(userMessage))); diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/MistralWithOpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/MistralWithOpenAiChatModelIT.java index 31898265ee8..386aec0a914 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/MistralWithOpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/MistralWithOpenAiChatModelIT.java @@ -17,6 +17,7 @@ package org.springframework.ai.openai.chat.proxy; import java.io.IOException; +import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; @@ -305,8 +306,10 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -321,11 +324,13 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { @ValueSource(strings = { "mistral-small-latest" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do 
you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -339,11 +344,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @Test void streamingMultiModalityImageUrl() throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(URI.create("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.chatModel.stream(new Prompt(List.of(userMessage))); diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java index 33d278bd1ed..4c040bf16cd 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/OllamaWithOpenAiChatModelIT.java @@ -17,6 +17,7 @@ package org.springframework.ai.openai.chat.proxy; import java.io.IOException; +import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; @@ -328,8 +329,10 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -344,11 +347,13 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { @ValueSource(strings = { "llava" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(URI.create("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); @@ -363,11 +368,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @ValueSource(strings = { "llava" }) void streamingMultiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - 
.mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.chatModel .stream(new Prompt(List.of(userMessage), OpenAiChatOptions.builder().model(modelName).build())); diff --git a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java index 802fab3b8a9..af061c0b01f 100644 --- a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java +++ b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/CreateGeminiRequestTests.java @@ -17,6 +17,7 @@ package org.springframework.ai.vertexai.gemini; import java.net.MalformedURLException; +import java.net.URI; import java.net.URL; import java.util.List; @@ -83,8 +84,11 @@ public void createRequestWithSystemMessage() throws MalformedURLException { var systemMessage = new SystemMessage("System Message Text"); - var userMessage = new UserMessage("User Message Text", - List.of(Media.builder().mimeType(MimeTypeUtils.IMAGE_PNG).data(new URL("http://example.com")).build())); + var userMessage = UserMessage.builder() + .text("User Message Text") + .media(List + .of(Media.builder().mimeType(MimeTypeUtils.IMAGE_PNG).data(URI.create("http://example.com")).build())) + .build(); var client = VertexAiGeminiChatModel.builder() .vertexAI(this.vertexAI) diff --git a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModelIT.java b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModelIT.java index 3dec24540eb..904e0a836d2 100644 --- a/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModelIT.java +++ b/models/spring-ai-vertex-ai-gemini/src/test/java/org/springframework/ai/vertexai/gemini/VertexAiGeminiChatModelIT.java @@ -246,8 +246,10 @@ void multiModalityTest() throws IOException { var data = new ClassPathResource("/vertex.test.png"); - var userMessage = new UserMessage("Explain what do you see o this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, data))); + var userMessage = UserMessage.builder() + .text("Explain what do you see o this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, data))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage))); @@ -284,9 +286,10 @@ void multiModalityPdfTest() throws IOException { var pdfData = new ClassPathResource("/spring-ai-reference-overview.pdf"); - var userMessage = new UserMessage( - "You are a very professional document summarization specialist. Please summarize the given document.", - List.of(new Media(new MimeType("application", "pdf"), pdfData))); + var userMessage = UserMessage.builder() + .text("You are a very professional document summarization specialist. 
Please summarize the given document.") + .media(List.of(new Media(new MimeType("application", "pdf"), pdfData))) + .build(); var response = this.chatModel.call(new Prompt(List.of(userMessage))); diff --git a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/chat/ZhiPuAiChatModelIT.java b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/chat/ZhiPuAiChatModelIT.java index 34dc56a3af4..467d4d17978 100644 --- a/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/chat/ZhiPuAiChatModelIT.java +++ b/models/spring-ai-zhipuai/src/test/java/org/springframework/ai/zhipuai/chat/ZhiPuAiChatModelIT.java @@ -17,6 +17,7 @@ package org.springframework.ai.zhipuai.chat; import java.io.IOException; +import java.net.URI; import java.net.URL; import java.util.ArrayList; import java.util.Arrays; @@ -284,8 +285,10 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { var imageData = new ClassPathResource("/test.png"); - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData))) + .build(); var response = this.chatModel .call(new Prompt(List.of(userMessage), ZhiPuAiChatOptions.builder().model(modelName).build())); @@ -299,11 +302,13 @@ void multiModalityEmbeddedImage(String modelName) throws IOException { @ValueSource(strings = { "glm-4v" }) void multiModalityImageUrl(String modelName) throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(URI.create("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); ChatResponse response = this.chatModel .call(new Prompt(List.of(userMessage), ZhiPuAiChatOptions.builder().model(modelName).build())); @@ -316,11 +321,13 @@ void multiModalityImageUrl(String modelName) throws IOException { @Test void streamingMultiModalityImageUrl() throws IOException { - var userMessage = new UserMessage("Explain what do you see on this picture?", - List.of(Media.builder() - .mimeType(MimeTypeUtils.IMAGE_PNG) - .data(new URL("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) - .build())); + var userMessage = UserMessage.builder() + .text("Explain what do you see on this picture?") + .media(List.of(Media.builder() + .mimeType(MimeTypeUtils.IMAGE_PNG) + .data(URI.create("https://docs.spring.io/spring-ai/reference/_images/multimodal.test.png")) + .build())) + .build(); Flux response = this.streamingChatModel.stream(new Prompt(List.of(userMessage), ZhiPuAiChatOptions.builder().model(ZhiPuAiApi.ChatModel.GLM_4V.getValue()).build())); diff --git a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/MessageChatMemoryAdvisor.java b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/MessageChatMemoryAdvisor.java index cd1c53cb301..6a563e52025 100644 --- a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/MessageChatMemoryAdvisor.java +++ 
b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/MessageChatMemoryAdvisor.java @@ -94,7 +94,7 @@ private AdvisedRequest before(AdvisedRequest request) { AdvisedRequest advisedRequest = AdvisedRequest.from(request).messages(advisedMessages).build(); // 4. Add the new user input to the conversation memory. - UserMessage userMessage = new UserMessage(request.userText(), request.media()); + UserMessage userMessage = UserMessage.builder().text(request.userText()).media(request.media()).build(); this.getChatMemoryStore().add(this.doGetConversationId(request.adviseContext()), userMessage); return advisedRequest; diff --git a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/PromptChatMemoryAdvisor.java b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/PromptChatMemoryAdvisor.java index e049cecfc62..e8d919dd794 100644 --- a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/PromptChatMemoryAdvisor.java +++ b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/PromptChatMemoryAdvisor.java @@ -130,7 +130,7 @@ private AdvisedRequest before(AdvisedRequest request) { .build(); // 4. Add the new user input to the conversation memory. - UserMessage userMessage = new UserMessage(request.userText(), request.media()); + UserMessage userMessage = UserMessage.builder().text(request.userText()).media(request.media()).build(); this.getChatMemoryStore().add(this.doGetConversationId(request.adviseContext()), userMessage); return advisedRequest; diff --git a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/api/AdvisedRequest.java b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/api/AdvisedRequest.java index e22545f9c57..4d8352eda96 100644 --- a/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/api/AdvisedRequest.java +++ b/spring-ai-client-chat/src/main/java/org/springframework/ai/chat/client/advisor/api/AdvisedRequest.java @@ -246,7 +246,7 @@ public Prompt toPrompt(TemplateRenderer templateRenderer) { .build() .render(); } - messages.add(new UserMessage(processedUserText, this.media())); + messages.add(UserMessage.builder().text(processedUserText).media(this.media()).build()); } if (this.chatOptions() instanceof ToolCallingChatOptions toolCallingChatOptions) { diff --git a/spring-ai-client-chat/src/test/java/org/springframework/ai/chat/client/ChatClientTest.java b/spring-ai-client-chat/src/test/java/org/springframework/ai/chat/client/ChatClientTest.java index b8d4b1c00d1..87285182f74 100644 --- a/spring-ai-client-chat/src/test/java/org/springframework/ai/chat/client/ChatClientTest.java +++ b/spring-ai-client-chat/src/test/java/org/springframework/ai/chat/client/ChatClientTest.java @@ -478,7 +478,7 @@ void simpleUserPromptObject() { var media = new Media(MimeTypeUtils.IMAGE_JPEG, new DefaultResourceLoader().getResource("classpath:/bikes.json")); - UserMessage message = new UserMessage("User prompt", List.of(media)); + UserMessage message = UserMessage.builder().text("User prompt").media(List.of(media)).build(); Prompt prompt = new Prompt(message); assertThat(ChatClient.builder(this.chatModel).build().prompt(prompt).call().content()).isEqualTo("response"); @@ -605,7 +605,7 @@ void whenPromptWithMessages() { .willReturn(new ChatResponse(List.of(new Generation(new AssistantMessage("response"))))); var chatClient = ChatClient.builder(this.chatModel).build(); - var prompt = new 
Prompt(new SystemMessage("instructions"), new UserMessage("my question"));
+        var prompt = new Prompt(new SystemMessage("instructions"), UserMessage.builder().text("my question").build());
         var content = chatClient.prompt(prompt).call().content();
         assertThat(content).isEqualTo("response");
diff --git a/spring-ai-model/src/main/java/org/springframework/ai/chat/messages/UserMessage.java b/spring-ai-model/src/main/java/org/springframework/ai/chat/messages/UserMessage.java
index 52caeddf75e..499949153f6 100644
--- a/spring-ai-model/src/main/java/org/springframework/ai/chat/messages/UserMessage.java
+++ b/spring-ai-model/src/main/java/org/springframework/ai/chat/messages/UserMessage.java
@@ -44,47 +44,15 @@ public UserMessage(String textContent) {
         this(textContent, new ArrayList<>(), Map.of());
     }
-    public UserMessage(Resource resource) {
-        this(MessageUtils.readResource(resource));
-    }
-
-    /**
-     * @deprecated use {@link #builder()} instead.
-     */
-    @Deprecated
-    public UserMessage(String textContent, List<Media> media) {
-        this(MessageType.USER, textContent, media, Map.of());
-    }
-
-    /**
-     * @deprecated use {@link #builder()} instead.
-     */
-    @Deprecated
-    public UserMessage(String textContent, Media... media) {
-        this(textContent, Arrays.asList(media));
-    }
-
-    /**
-     * @deprecated use {@link #builder()} instead. Will be made private in the next
-     * release.
-     */
-    @Deprecated
-    public UserMessage(String textContent, Collection<Media> media, Map<String, Object> metadata) {
+    private UserMessage(String textContent, Collection<Media> media, Map<String, Object> metadata) {
         super(MessageType.USER, textContent, metadata);
         Assert.notNull(media, "media cannot be null");
         Assert.noNullElements(media, "media cannot have null elements");
         this.media = new ArrayList<>(media);
     }
-    /**
-     * @deprecated use {@link #builder()} instead.
-     */
-    @Deprecated
-    public UserMessage(MessageType messageType, String textContent, Collection<Media> media,
-            Map<String, Object> metadata) {
-        super(messageType, textContent, metadata);
-        Assert.notNull(media, "media data must not be null");
-        this.media = new ArrayList<>(media);
+    public UserMessage(Resource resource) {
+        this(MessageUtils.readResource(resource));
     }
     @Override
@@ -105,7 +73,7 @@ public List<Media> getMedia() {
     }
     public UserMessage copy() {
-        return new UserMessage(getText(), List.copyOf(getMedia()), Map.copyOf(getMetadata()));
+        return new Builder().text(getText()).media(List.copyOf(getMedia())).metadata(Map.copyOf(getMetadata())).build();
     }
     public Builder mutate() {
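
Taken as a whole, the patch funnels every former new UserMessage(text, media) or new UserMessage(text, media, metadata) call through UserMessage.builder(), removes the deprecated media constructors, and makes the remaining media/metadata constructor private. Below is a minimal migration sketch of that call pattern, using only the builder methods visible in the changed files (text, media, metadata, build); the Media import package and the surrounding class are assumptions for illustration, not part of this patch.

import java.util.List;
import java.util.Map;

import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.content.Media; // assumed package location for Media
import org.springframework.core.io.ClassPathResource;
import org.springframework.util.MimeTypeUtils;

// Illustrative only: shows the constructor-to-builder migration this patch applies.
class UserMessageBuilderMigration {

    Prompt multimodalPrompt() {
        var imageData = new ClassPathResource("/test.png");

        // Before (constructors removed or deprecated by this patch):
        // var userMessage = new UserMessage("Explain what do you see on this picture?",
        //         List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData)));

        // After: text, media and optional metadata are set explicitly on the builder.
        UserMessage userMessage = UserMessage.builder()
            .text("Explain what do you see on this picture?")
            .media(List.of(new Media(MimeTypeUtils.IMAGE_PNG, imageData)))
            .metadata(Map.of("source", "example")) // optional; mirrors the Neo4jChatMemory change
            .build();

        return new Prompt(List.of(userMessage));
    }

}

The plain-text UserMessage(String) and UserMessage(Resource) constructors remain public after this change, so only call sites that pass media or metadata need this rewrite.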