diff --git a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/DockerModelRunnerWithOpenAiChatModelIT.java b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/DockerModelRunnerWithOpenAiChatModelIT.java index 5533e4185ca..b315e06bd72 100644 --- a/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/DockerModelRunnerWithOpenAiChatModelIT.java +++ b/models/spring-ai-openai/src/test/java/org/springframework/ai/openai/chat/proxy/DockerModelRunnerWithOpenAiChatModelIT.java @@ -29,7 +29,7 @@ import org.junit.jupiter.api.Test; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.testcontainers.containers.SocatContainer; +import org.testcontainers.containers.DockerModelRunnerContainer; import org.testcontainers.junit.jupiter.Container; import org.testcontainers.junit.jupiter.Testcontainers; import reactor.core.publisher.Flux; @@ -69,7 +69,7 @@ */ @Testcontainers @SpringBootTest(classes = DockerModelRunnerWithOpenAiChatModelIT.Config.class) -@Disabled("Requires Docker Model Runner enabled. See https://docs.docker.com/desktop/features/model-runner/") +// Requires Docker Model Runner enabled. See
https://docs.docker.com/desktop/features/model-runner/ class DockerModelRunnerWithOpenAiChatModelIT { private static final Logger logger = LoggerFactory.getLogger(DockerModelRunnerWithOpenAiChatModelIT.class); @@ -77,7 +77,7 @@ class DockerModelRunnerWithOpenAiChatModelIT { private static final String DEFAULT_MODEL = "ai/gemma3:4B-F16"; @Container - private static final SocatContainer socat = new SocatContainer().withTarget(80, "model-runner.docker.internal"); + private static final DockerModelRunnerContainer DMR = new DockerModelRunnerContainer("alpine/socat:1.7.4.3-r0"); @Value("classpath:/prompts/system-message.st") private Resource systemResource; @@ -89,7 +89,7 @@ class DockerModelRunnerWithOpenAiChatModelIT { public static void beforeAll() throws IOException, InterruptedException { logger.info("Start pulling the '" + DEFAULT_MODEL + "' generative ... would take several minutes ..."); - String baseUrl = "http://%s:%d".formatted(socat.getHost(), socat.getMappedPort(80)); + String baseUrl = "http://%s:%d".formatted(DMR.getHost(), DMR.getMappedPort(80)); RestAssured.given().baseUri(baseUrl).body(""" { @@ -352,8 +352,7 @@ static class Config { @Bean public OpenAiApi chatCompletionApi() { - var baseUrl = "http://%s:%d/engines".formatted(socat.getHost(), socat.getMappedPort(80)); - return OpenAiApi.builder().baseUrl(baseUrl).apiKey("test").build(); + return OpenAiApi.builder().baseUrl(DMR.getOpenAIEndpoint()).apiKey("test").build(); } @Bean diff --git a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/dmr-chat.adoc b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/dmr-chat.adoc index 4cbdd1de0a3..1f5f7bf5dd3 100644 --- a/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/dmr-chat.adoc +++ b/spring-ai-docs/src/main/antora/modules/ROOT/pages/api/chat/dmr-chat.adoc @@ -27,11 +27,11 @@ Option 2: [source,java] ---- @Container -private static final SocatContainer socat = new SocatContainer().withTarget(80, 
"model-runner.docker.internal"); +private static final DockerModelRunnerContainer DMR = new DockerModelRunnerContainer("alpine/socat:1.7.4.3-r0"); @Bean public OpenAiApi chatCompletionApi() { - var baseUrl = "http://%s:%d/engines".formatted(socat.getHost(), socat.getMappedPort(80)); + var baseUrl = DMR.getOpenAIEndpoint(); return OpenAiApi.builder().baseUrl(baseUrl).apiKey("test").build(); } ----