@@ -29,7 +29,7 @@
import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-import org.testcontainers.containers.SocatContainer;
+import org.testcontainers.containers.DockerModelRunnerContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;
import reactor.core.publisher.Flux;
@@ -69,15 +69,16 @@
*/
@Testcontainers
@SpringBootTest(classes = DockerModelRunnerWithOpenAiChatModelIT.Config.class)
@Disabled("Requires Docker Model Runner enabled. See https://docs.docker.com/desktop/features/model-runner/")
// @Disabled("Requires Docker Model Runner enabled. See
Review comment (Member): @eddumelendez While merging, I am marking this IT test as "Disabled" until the Spring AI IT infrastructure is enabled with docker model runner.

+// https://docs.docker.com/desktop/features/model-runner/")
class DockerModelRunnerWithOpenAiChatModelIT {

private static final Logger logger = LoggerFactory.getLogger(DockerModelRunnerWithOpenAiChatModelIT.class);

private static final String DEFAULT_MODEL = "ai/gemma3:4B-F16";

@Container
-private static final SocatContainer socat = new SocatContainer().withTarget(80, "model-runner.docker.internal");
+private static final DockerModelRunnerContainer DMR = new DockerModelRunnerContainer("alpine/socat:1.7.4.3-r0");

@Value("classpath:/prompts/system-message.st")
private Resource systemResource;
@@ -89,7 +90,7 @@ class DockerModelRunnerWithOpenAiChatModelIT {
public static void beforeAll() throws IOException, InterruptedException {
logger.info("Start pulling the '" + DEFAULT_MODEL + "' generative ... would take several minutes ...");

String baseUrl = "http://%s:%d".formatted(socat.getHost(), socat.getMappedPort(80));
String baseUrl = "http://%s:%d".formatted(DMR.getHost(), DMR.getMappedPort(80));

RestAssured.given().baseUri(baseUrl).body("""
{
@@ -352,8 +353,7 @@ static class Config {

@Bean
public OpenAiApi chatCompletionApi() {
var baseUrl = "http://%s:%d/engines".formatted(socat.getHost(), socat.getMappedPort(80));
return OpenAiApi.builder().baseUrl(baseUrl).apiKey("test").build();
return OpenAiApi.builder().baseUrl(DMR.getOpenAIEndpoint()).apiKey("test").build();
}

@Bean
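For readability, the wiring that the changes above converge on is summarized in the sketch below. This is a minimal, illustrative sketch only: the class name and the baseUrl() helper are hypothetical, while the container image, the DockerModelRunnerContainer calls (getHost(), getMappedPort(80), getOpenAIEndpoint()), and the OpenAiApi builder usage are taken directly from the changed lines.

[source,java]
----
import org.springframework.ai.openai.api.OpenAiApi;
import org.testcontainers.containers.DockerModelRunnerContainer;
import org.testcontainers.junit.jupiter.Container;
import org.testcontainers.junit.jupiter.Testcontainers;

@Testcontainers
class DockerModelRunnerWiringSketch {

	// Container that exposes Docker Model Runner to the test
	// (same image string as in the IT test above).
	@Container
	private static final DockerModelRunnerContainer DMR =
			new DockerModelRunnerContainer("alpine/socat:1.7.4.3-r0");

	// Base URL of the Model Runner REST API; the IT test above uses it
	// with RestAssured to pull the model before the tests run.
	static String baseUrl() {
		return "http://%s:%d".formatted(DMR.getHost(), DMR.getMappedPort(80));
	}

	// OpenAI-compatible endpoint exposed by Docker Model Runner;
	// the IT test passes a placeholder API key ("test").
	static OpenAiApi chatCompletionApi() {
		return OpenAiApi.builder().baseUrl(DMR.getOpenAIEndpoint()).apiKey("test").build();
	}

}
----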
@@ -27,11 +27,11 @@ Option 2:
[source,java]
----
@Container
-private static final SocatContainer socat = new SocatContainer().withTarget(80, "model-runner.docker.internal");
+private static final DockerModelRunnerContainer DMR = new DockerModelRunnerContainer("alpine/socat:1.7.4.3-r0");

@Bean
public OpenAiApi chatCompletionApi() {
var baseUrl = "http://%s:%d/engines".formatted(socat.getHost(), socat.getMappedPort(80));
var baseUrl = DMR.getOpenAIEndpoint();
return OpenAiApi.builder().baseUrl(baseUrl).apiKey("test").build();
}
----
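To round out Option 2, a chat model bean can be registered on top of the OpenAiApi bean. The sketch below is not part of the change: it assumes Spring AI's OpenAiChatModel and OpenAiChatOptions builders, and reuses the model name ("ai/gemma3:4B-F16") from the integration test above.

[source,java]
----
@Bean
public OpenAiChatModel chatModel(OpenAiApi chatCompletionApi) {
	// Model name taken from the IT test's DEFAULT_MODEL; any model
	// available to Docker Model Runner can be used here.
	return OpenAiChatModel.builder()
		.openAiApi(chatCompletionApi)
		.defaultOptions(OpenAiChatOptions.builder().model("ai/gemma3:4B-F16").build())
		.build();
}
----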