|
23 | 23 | import io.micrometer.observation.ObservationRegistry;
|
24 | 24 | import org.junit.jupiter.api.Test;
|
25 | 25 | import org.junit.jupiter.api.extension.ExtendWith;
|
| 26 | +import org.junit.jupiter.params.ParameterizedTest; |
| 27 | +import org.junit.jupiter.params.provider.ValueSource; |
26 | 28 | import org.mockito.Mock;
|
27 | 29 | import org.mockito.junit.jupiter.MockitoExtension;
|
28 | 30 |
|
|
38 | 40 | import org.springframework.ai.retry.RetryUtils;
|
39 | 41 |
|
40 | 42 | import static org.assertj.core.api.Assertions.assertThat;
|
41 |
| -import static org.junit.jupiter.api.Assertions.*; |
| 43 | +import static org.assertj.core.api.Assertions.assertThatThrownBy; |
| 44 | +import static org.junit.jupiter.api.Assertions.assertEquals; |
| 45 | +import static org.junit.jupiter.api.Assertions.assertNull; |
| 46 | +import static org.junit.jupiter.api.Assertions.assertThrows; |
42 | 47 |
|
43 | 48 | /**
|
44 | 49 | * @author Jihoon Kim
|
@@ -171,4 +176,153 @@ void buildChatResponseMetadataAggregationWithNonEmptyMetadataButEmptyEval() {
|
171 | 176 |
|
172 | 177 | }
|
173 | 178 |
|
| 179 | + @Test |
| 180 | + void buildOllamaChatModelWithNullOllamaApi() { |
| 181 | + assertThatThrownBy(() -> OllamaChatModel.builder().ollamaApi(null).build()) |
| 182 | + .isInstanceOf(IllegalArgumentException.class) |
| 183 | + .hasMessageContaining("ollamaApi must not be null"); |
| 184 | + } |
| 185 | + |
| 186 | + @Test |
| 187 | + void buildOllamaChatModelWithAllBuilderOptions() { |
| 188 | + OllamaOptions options = OllamaOptions.builder().model(OllamaModel.CODELLAMA).temperature(0.7).topK(50).build(); |
| 189 | + |
| 190 | + ToolCallingManager toolManager = ToolCallingManager.builder().build(); |
| 191 | + ModelManagementOptions managementOptions = ModelManagementOptions.builder().build(); |
| 192 | + |
| 193 | + ChatModel chatModel = OllamaChatModel.builder() |
| 194 | + .ollamaApi(this.ollamaApi) |
| 195 | + .defaultOptions(options) |
| 196 | + .toolCallingManager(toolManager) |
| 197 | + .retryTemplate(RetryUtils.DEFAULT_RETRY_TEMPLATE) |
| 198 | + .observationRegistry(ObservationRegistry.NOOP) |
| 199 | + .modelManagementOptions(managementOptions) |
| 200 | + .build(); |
| 201 | + |
| 202 | + assertThat(chatModel).isNotNull(); |
| 203 | + assertThat(chatModel).isInstanceOf(OllamaChatModel.class); |
| 204 | + } |
| 205 | + |
| 206 | + @Test |
| 207 | + void buildChatResponseMetadataWithLargeValues() { |
| 208 | + Long evalDuration = Long.MAX_VALUE; |
| 209 | + Integer evalCount = Integer.MAX_VALUE; |
| 210 | + Integer promptEvalCount = Integer.MAX_VALUE; |
| 211 | + Long promptEvalDuration = Long.MAX_VALUE; |
| 212 | + |
| 213 | + OllamaApi.ChatResponse response = new OllamaApi.ChatResponse("model", Instant.now(), null, null, null, |
| 214 | + Long.MAX_VALUE, Long.MAX_VALUE, promptEvalCount, promptEvalDuration, evalCount, evalDuration); |
| 215 | + |
| 216 | + ChatResponseMetadata metadata = OllamaChatModel.from(response, null); |
| 217 | + |
| 218 | + assertEquals(Duration.ofNanos(evalDuration), metadata.get("eval-duration")); |
| 219 | + assertEquals(evalCount, metadata.get("eval-count")); |
| 220 | + assertEquals(Duration.ofNanos(promptEvalDuration), metadata.get("prompt-eval-duration")); |
| 221 | + assertEquals(promptEvalCount, metadata.get("prompt-eval-count")); |
| 222 | + } |
| 223 | + |
| 224 | + @Test |
| 225 | + void buildChatResponseMetadataAggregationWithNullPrevious() { |
| 226 | + Long evalDuration = 1000L; |
| 227 | + Integer evalCount = 101; |
| 228 | + Integer promptEvalCount = 808; |
| 229 | + Long promptEvalDuration = 8L; |
| 230 | + |
| 231 | + OllamaApi.ChatResponse response = new OllamaApi.ChatResponse("model", Instant.now(), null, null, null, 2000L, |
| 232 | + 100L, promptEvalCount, promptEvalDuration, evalCount, evalDuration); |
| 233 | + |
| 234 | + ChatResponseMetadata metadata = OllamaChatModel.from(response, null); |
| 235 | + |
| 236 | + assertThat(metadata.getUsage()).isEqualTo(new DefaultUsage(promptEvalCount, evalCount)); |
| 237 | + assertEquals(Duration.ofNanos(evalDuration), metadata.get("eval-duration")); |
| 238 | + assertEquals(evalCount, metadata.get("eval-count")); |
| 239 | + assertEquals(Duration.ofNanos(promptEvalDuration), metadata.get("prompt-eval-duration")); |
| 240 | + assertEquals(promptEvalCount, metadata.get("prompt-eval-count")); |
| 241 | + } |
| 242 | + |
| 243 | + @ParameterizedTest |
| 244 | + @ValueSource(strings = { "LLAMA2", "MISTRAL", "CODELLAMA", "LLAMA3", "GEMMA" }) |
| 245 | + void buildOllamaChatModelWithDifferentModels(String modelName) { |
| 246 | + OllamaModel model = OllamaModel.valueOf(modelName); |
| 247 | + OllamaOptions options = OllamaOptions.builder().model(model).build(); |
| 248 | + |
| 249 | + ChatModel chatModel = OllamaChatModel.builder().ollamaApi(this.ollamaApi).defaultOptions(options).build(); |
| 250 | + |
| 251 | + assertThat(chatModel).isNotNull(); |
| 252 | + assertThat(chatModel).isInstanceOf(OllamaChatModel.class); |
| 253 | + } |
| 254 | + |
| 255 | + @Test |
| 256 | + void buildOllamaChatModelWithCustomObservationRegistry() { |
| 257 | + ObservationRegistry customRegistry = ObservationRegistry.create(); |
| 258 | + |
| 259 | + ChatModel chatModel = OllamaChatModel.builder() |
| 260 | + .ollamaApi(this.ollamaApi) |
| 261 | + .observationRegistry(customRegistry) |
| 262 | + .build(); |
| 263 | + |
| 264 | + assertThat(chatModel).isNotNull(); |
| 265 | + } |
| 266 | + |
| 267 | + @Test |
| 268 | + void buildChatResponseMetadataPreservesModelName() { |
| 269 | + String modelName = "custom-model-name"; |
| 270 | + OllamaApi.ChatResponse response = new OllamaApi.ChatResponse(modelName, Instant.now(), null, null, null, 1000L, |
| 271 | + 100L, 10, 50L, 20, 200L); |
| 272 | + |
| 273 | + ChatResponseMetadata metadata = OllamaChatModel.from(response, null); |
| 274 | + |
| 275 | + // Verify that model information is preserved in metadata |
| 276 | + assertThat(metadata).isNotNull(); |
| 277 | + // Note: The exact key for model name would depend on the implementation |
| 278 | + // This test verifies that metadata building doesn't lose model information |
| 279 | + } |
| 280 | + |
| 281 | + @Test |
| 282 | + void buildChatResponseMetadataWithInstantTime() { |
| 283 | + Instant createdAt = Instant.now(); |
| 284 | + OllamaApi.ChatResponse response = new OllamaApi.ChatResponse("model", createdAt, null, null, null, 1000L, 100L, |
| 285 | + 10, 50L, 20, 200L); |
| 286 | + |
| 287 | + ChatResponseMetadata metadata = OllamaChatModel.from(response, null); |
| 288 | + |
| 289 | + assertThat(metadata).isNotNull(); |
| 290 | + // Verify timestamp is preserved (exact key depends on implementation) |
| 291 | + } |
| 292 | + |
| 293 | + @Test |
| 294 | + void buildChatResponseMetadataAggregationOverflowHandling() { |
| 295 | + // Test potential integer overflow scenarios |
| 296 | + OllamaApi.ChatResponse response = new OllamaApi.ChatResponse("model", Instant.now(), null, null, null, 1000L, |
| 297 | + 100L, Integer.MAX_VALUE, Long.MAX_VALUE, Integer.MAX_VALUE, Long.MAX_VALUE); |
| 298 | + |
| 299 | + ChatResponse previousChatResponse = ChatResponse.builder() |
| 300 | + .generations(List.of()) |
| 301 | + .metadata(ChatResponseMetadata.builder() |
| 302 | + .usage(new DefaultUsage(1, 1)) |
| 303 | + .keyValue("eval-duration", Duration.ofNanos(1L)) |
| 304 | + .keyValue("prompt-eval-duration", Duration.ofNanos(1L)) |
| 305 | + .build()) |
| 306 | + .build(); |
| 307 | + |
| 308 | + // This should not throw an exception, even with potential overflow |
| 309 | + ChatResponseMetadata metadata = OllamaChatModel.from(response, previousChatResponse); |
| 310 | + assertThat(metadata).isNotNull(); |
| 311 | + } |
| 312 | + |
| 313 | + @Test |
| 314 | + void buildOllamaChatModelImmutability() { |
| 315 | + // Test that the builder creates immutable instances |
| 316 | + OllamaOptions options = OllamaOptions.builder().model(OllamaModel.MISTRAL).temperature(0.5).build(); |
| 317 | + |
| 318 | + ChatModel chatModel1 = OllamaChatModel.builder().ollamaApi(this.ollamaApi).defaultOptions(options).build(); |
| 319 | + |
| 320 | + ChatModel chatModel2 = OllamaChatModel.builder().ollamaApi(this.ollamaApi).defaultOptions(options).build(); |
| 321 | + |
| 322 | + // Should create different instances |
| 323 | + assertThat(chatModel1).isNotSameAs(chatModel2); |
| 324 | + assertThat(chatModel1).isNotNull(); |
| 325 | + assertThat(chatModel2).isNotNull(); |
| 326 | + } |
| 327 | + |
174 | 328 | }
|
0 commit comments