diff --git a/components/camel-ai/camel-openai/pom.xml b/components/camel-ai/camel-openai/pom.xml
index f63077ac32c35..149b9bc45325d 100644
--- a/components/camel-ai/camel-openai/pom.xml
+++ b/components/camel-ai/camel-openai/pom.xml
@@ -33,6 +33,10 @@
Camel :: AI :: OpenAI
Camel OpenAI component for chat completion using OpenAI API
+
+ 3
+
+
org.apache.camel
@@ -66,5 +70,17 @@
camel-jackson
test
+
+ org.apache.camel
+ camel-test-infra-ollama
+ ${project.version}
+ test-jar
+ test
+
+
+ org.assertj
+ assertj-core
+ test
+
diff --git a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
index 3ccd26bf56118..5cbe1350bbbcf 100644
--- a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
+++ b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
@@ -206,7 +206,14 @@ private List buildMessages(Exchange exchange, OpenAI
addConversationHistory(messages, in, config);
ChatCompletionMessageParam userMessage = buildUserMessage(in, config);
- messages.add(userMessage);
+ if (userMessage != null) {
+ messages.add(userMessage);
+ }
+
+ if (messages.isEmpty()) {
+ throw new IllegalArgumentException(
+ "No input provided to LLM. At least one message (user, system, or developer) must be provided");
+ }
return messages;
}
@@ -243,9 +250,8 @@ private ChatCompletionMessageParam buildUserMessage(Message in, OpenAIConfigurat
private ChatCompletionMessageParam buildTextMessage(Message in, String userPrompt, OpenAIConfiguration config) {
String prompt = userPrompt != null ? userPrompt : in.getBody(String.class);
- if (prompt == null || prompt.isEmpty()) {
- throw new IllegalArgumentException(
- "Message body or user message configuration must contain the prompt text");
+ if (prompt == null || prompt.trim().isEmpty()) {
+ return null;
}
return createTextMessage(prompt);
}
diff --git a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java
new file mode 100644
index 0000000000000..194e66ca73549
--- /dev/null
+++ b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai.integration;
+
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+@DisabledIfSystemProperty(named = "ci.env.name", matches = ".*",
+ disabledReason = "Requires too much network resources")
+public class OpenAIChatCompletionIT extends OpenAITestSupport {
+
+ @Override
+ protected RouteBuilder createRouteBuilder() {
+ return new RouteBuilder() {
+ @Override
+ public void configure() {
+ // Route for simple message test
+ from("direct:send-simple-message")
+ .toF("openai:chat-completion?apiKey=%s&baseUrl=%s&model=%s", apiKey, baseUrl, model)
+ .to("mock:response");
+ }
+ };
+ }
+
+ @Test
+ public void testSendSimpleStringMessage() throws Exception {
+ // Setup mock endpoint expectations
+ MockEndpoint mockResponse = getMockEndpoint("mock:response");
+ mockResponse.expectedMessageCount(1);
+
+ // Send a test message to the OpenAI endpoint
+ String response = template.requestBody("direct:send-simple-message",
+ "What is Apache Camel?",
+ String.class);
+
+ // Verify the mock endpoint received the message
+ mockResponse.assertIsSatisfied();
+
+ // Verify response is not null and contains meaningful content
+ assertThat(response).isNotNull();
+ assertThat(response).isNotEmpty();
+ assertThat(response.length()).isGreaterThan(10);
+
+ assertThat(response).contains("Camel");
+ assertThat(response).contains("Apache");
+ assertThat(response).contains("integration");
+ }
+
+ @Test
+ public void testEmptyMessageThrowsException() {
+ // Verify that empty messages result in an IllegalArgumentException
+ Exception exception = assertThrows(Exception.class, () -> {
+ template.requestBody("direct:send-simple-message", "", String.class);
+ });
+
+ // Verify the exception is an IllegalArgumentException about empty input
+ assertThat(exception.getCause()).isInstanceOf(IllegalArgumentException.class);
+ assertThat(exception.getCause().getMessage()).contains("No input provided to LLM");
+ }
+}
diff --git a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
new file mode 100644
index 0000000000000..0f4da0e4d0ac3
--- /dev/null
+++ b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai.integration;
+
+import org.apache.camel.test.infra.ollama.services.OllamaService;
+import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.apache.camel.test.junit5.CamelTestSupport;
+
+public class OpenAITestSupport extends CamelTestSupport {
+
+ protected String apiKey;
+ protected String baseUrl;
+ protected String model;
+
+ static OllamaService OLLAMA = hasEnvironmentConfiguration()
+ ? null
+ : OllamaServiceFactory.createSingletonService();
+
+ @Override
+ protected void setupResources() throws Exception {
+ super.setupResources();
+
+ if (OLLAMA != null) {
+ // Use Ollama service
+ baseUrl = OLLAMA.baseUrlV1();
+ model = OLLAMA.modelName();
+ apiKey = "dummy"; // Ollama doesn't require API key
+ } else {
+ // Use environment variables
+ apiKey = System.getenv("OPENAI_API_KEY");
+ baseUrl = System.getenv("OPENAI_BASE_URL"); // Optional
+ model = System.getenv("OPENAI_MODEL"); // Optional
+ }
+ }
+
+ protected static boolean hasEnvironmentConfiguration() {
+ String apiKey = System.getenv("OPENAI_API_KEY");
+ return apiKey != null && !apiKey.trim().isEmpty();
+ }
+}
diff --git a/components/camel-ai/camel-openai/test_execution.md b/components/camel-ai/camel-openai/test_execution.md
new file mode 100644
index 0000000000000..65f8d672b93bd
--- /dev/null
+++ b/components/camel-ai/camel-openai/test_execution.md
@@ -0,0 +1,17 @@
+## Test execution
+
+### macOS or Linux without an NVIDIA graphics card
+If Ollama is already installed on the system, execute the tests with
+
+```bash
+mvn verify -Dollama.endpoint=http://localhost:11434/ -Dollama.model=granite4:3b -Dollama.instance.type=remote
+```
+
+The Ollama Docker image runs very slowly on a MacBook without NVIDIA hardware acceleration.
+
+### Linux with an NVIDIA graphics card
+Hardware acceleration can be used, and the tests can be executed with
+
+```bash
+mvn verify -Dollama.container.enable.gpu=enabled
+```
\ No newline at end of file