diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelTests.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelTests.java
index 4e916ad5d43..bd8d83e5a7c 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelTests.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaChatModelTests.java
@@ -325,4 +325,28 @@ void buildOllamaChatModelImmutability() {
 		assertThat(chatModel2).isNotNull();
 	}
 
+	@Test
+	void buildChatResponseMetadataWithZeroValues() {
+		// Test with all zero/minimal values
+		OllamaApi.ChatResponse response = new OllamaApi.ChatResponse("model", Instant.now(), null, null, null, 0L, 0L,
+				0, 0L, 0, 0L);
+
+		ChatResponseMetadata metadata = OllamaChatModel.from(response, null);
+
+		assertEquals(Duration.ZERO, metadata.get("eval-duration"));
+		assertEquals(Integer.valueOf(0), metadata.get("eval-count"));
+		assertEquals(Duration.ZERO, metadata.get("prompt-eval-duration"));
+		assertEquals(Integer.valueOf(0), metadata.get("prompt-eval-count"));
+		assertThat(metadata.getUsage()).isEqualTo(new DefaultUsage(0, 0));
+	}
+
+	@Test
+	void buildOllamaChatModelWithMinimalConfiguration() {
+		// Test building with only required parameters
+		ChatModel chatModel = OllamaChatModel.builder().ollamaApi(this.ollamaApi).build();
+
+		assertThat(chatModel).isNotNull();
+		assertThat(chatModel).isInstanceOf(OllamaChatModel.class);
+	}
+
 }
diff --git a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaEmbeddingRequestTests.java b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaEmbeddingRequestTests.java
index d544695d632..5417590dfb1 100644
--- a/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaEmbeddingRequestTests.java
+++ b/models/spring-ai-ollama/src/test/java/org/springframework/ai/ollama/OllamaEmbeddingRequestTests.java
@@ -185,4 +185,44 @@ public void ollamaEmbeddingRequestWithWhitespaceInputs() {
 		assertThat(ollamaRequest.input()).containsExactly("", " ", "\t\n", "normal text", " spaced ");
 	}
 
+	@Test
+	public void ollamaEmbeddingRequestWithNullInput() {
+		// Test behavior when input list contains null values
+		List<String> inputsWithNull = Arrays.asList("Hello", null, "World");
+		var embeddingRequest = this.embeddingModel.buildEmbeddingRequest(new EmbeddingRequest(inputsWithNull, null));
+		var ollamaRequest = this.embeddingModel.ollamaEmbeddingRequest(embeddingRequest);
+
+		assertThat(ollamaRequest.input()).containsExactly("Hello", null, "World");
+		assertThat(ollamaRequest.input()).hasSize(3);
+	}
+
+	@Test
+	public void ollamaEmbeddingRequestPartialOptionsOverride() {
+		// Test that only specified options are overridden, others remain default
+		var requestOptions = OllamaOptions.builder()
+			.model("PARTIAL_OVERRIDE_MODEL")
+			.numGPU(5) // Override only numGPU, leave others as default
+			.build();
+
+		var embeddingRequest = this.embeddingModel
+			.buildEmbeddingRequest(new EmbeddingRequest(List.of("Partial override"), requestOptions));
+		var ollamaRequest = this.embeddingModel.ollamaEmbeddingRequest(embeddingRequest);
+
+		assertThat(ollamaRequest.model()).isEqualTo("PARTIAL_OVERRIDE_MODEL");
+		assertThat(ollamaRequest.options().get("num_gpu")).isEqualTo(5);
+		assertThat(ollamaRequest.options().get("main_gpu")).isEqualTo(11);
+		assertThat(ollamaRequest.options().get("use_mmap")).isEqualTo(true);
+	}
+
+	@Test
+	public void ollamaEmbeddingRequestWithEmptyStringInput() {
+		// Test with list containing only empty string
+		var embeddingRequest = this.embeddingModel.buildEmbeddingRequest(new EmbeddingRequest(List.of(""), null));
+		var ollamaRequest = this.embeddingModel.ollamaEmbeddingRequest(embeddingRequest);
+
+		assertThat(ollamaRequest.input()).hasSize(1);
+		assertThat(ollamaRequest.input().get(0)).isEmpty();
+		assertThat(ollamaRequest.model()).isEqualTo("DEFAULT_MODEL");
+	}
+
 }