Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
16 changes: 16 additions & 0 deletions components/camel-ai/camel-openai/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -33,6 +33,10 @@
<name>Camel :: AI :: OpenAI</name>
<description>Camel OpenAI component for chat completion using OpenAI API</description>

<properties>
<failsafe.rerunFailingTestsCount>3</failsafe.rerunFailingTestsCount>
</properties>

<dependencies>
<dependency>
<groupId>org.apache.camel</groupId>
Expand Down Expand Up @@ -66,5 +70,17 @@
<artifactId>camel-jackson</artifactId>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-test-infra-ollama</artifactId>
<version>${project.version}</version>
<type>test-jar</type>
<scope>test</scope>
</dependency>
<dependency>
<groupId>org.assertj</groupId>
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
</dependencies>
</project>
Original file line number Diff line number Diff line change
Expand Up @@ -206,7 +206,14 @@ private List<ChatCompletionMessageParam> buildMessages(Exchange exchange, OpenAI
addConversationHistory(messages, in, config);

ChatCompletionMessageParam userMessage = buildUserMessage(in, config);
messages.add(userMessage);
if (userMessage != null) {
messages.add(userMessage);
}

if (messages.isEmpty()) {
throw new IllegalArgumentException(
"No input provided to LLM. At least one message (user, system, or developer) must be provided");
}

return messages;
}
Expand Down Expand Up @@ -243,9 +250,8 @@ private ChatCompletionMessageParam buildUserMessage(Message in, OpenAIConfigurat

private ChatCompletionMessageParam buildTextMessage(Message in, String userPrompt, OpenAIConfiguration config) {
String prompt = userPrompt != null ? userPrompt : in.getBody(String.class);
if (prompt == null || prompt.isEmpty()) {
throw new IllegalArgumentException(
"Message body or user message configuration must contain the prompt text");
if (prompt == null || prompt.trim().isEmpty()) {
return null;
}
return createTextMessage(prompt);
}
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,79 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openai.integration;

import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;

import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;

@DisabledIfSystemProperty(named = "ci.env.name", matches = ".*",
                          disabledReason = "Requires too much network resources")
public class OpenAIChatCompletionIT extends OpenAITestSupport {

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // Route for simple message test
                from("direct:send-simple-message")
                        .toF("openai:chat-completion?apiKey=%s&baseUrl=%s&model=%s", apiKey, baseUrl, model)
                        .to("mock:response");
            }
        };
    }

    /**
     * Sends a plain String prompt through the chat-completion endpoint and
     * verifies that a non-trivial answer comes back.
     *
     * <p>LLM output is nondeterministic, so the content checks are deliberately
     * loose: case-insensitive matching on the most stable tokens only. Exact,
     * case-sensitive token assertions (e.g. {@code contains("integration")})
     * are a known source of flaky failures with small models.
     */
    @Test
    public void testSendSimpleStringMessage() throws Exception {
        // Setup mock endpoint expectations
        MockEndpoint mockResponse = getMockEndpoint("mock:response");
        mockResponse.expectedMessageCount(1);

        // Send a test message to the OpenAI endpoint
        String response = template.requestBody("direct:send-simple-message",
                "What is Apache Camel?",
                String.class);

        // Verify the mock endpoint received the message
        mockResponse.assertIsSatisfied();

        // Verify response is not null and contains meaningful content
        assertThat(response).isNotNull();
        assertThat(response).isNotEmpty();
        assertThat(response.length()).isGreaterThan(10);

        // Case-insensitive checks on the tokens the model is most likely to
        // echo back from the prompt itself; avoids flaking on capitalization.
        assertThat(response).containsIgnoringCase("camel");
        assertThat(response).containsIgnoringCase("apache");
    }

    /**
     * Verifies that sending an empty body fails fast with the component's
     * "No input provided to LLM" {@link IllegalArgumentException} instead of
     * issuing a pointless API call.
     */
    @Test
    public void testEmptyMessageThrowsException() {
        // Camel wraps producer failures, so assert on the cause, not the
        // top-level exception type.
        Exception exception = assertThrows(Exception.class, () -> {
            template.requestBody("direct:send-simple-message", "", String.class);
        });

        // Verify the exception is an IllegalArgumentException about empty input
        assertThat(exception.getCause()).isInstanceOf(IllegalArgumentException.class);
        assertThat(exception.getCause().getMessage()).contains("No input provided to LLM");
    }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.camel.component.openai.integration;

import org.apache.camel.test.infra.ollama.services.OllamaService;
import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;

/**
 * Base class for OpenAI component integration tests.
 *
 * <p>Resolves the endpoint configuration from one of two sources:
 * <ol>
 *   <li>If {@code OPENAI_API_KEY} is set, the real OpenAI-compatible endpoint
 *       described by the {@code OPENAI_*} environment variables is used.</li>
 *   <li>Otherwise a local Ollama test-infra service is started and its
 *       OpenAI-compatible v1 endpoint is used with a dummy API key.</li>
 * </ol>
 */
public class OpenAITestSupport extends CamelTestSupport {

    protected String apiKey;
    protected String baseUrl;
    protected String model;

    /**
     * Shared Ollama service, created once per JVM via the singleton factory;
     * {@code null} when real OpenAI credentials are supplied through the
     * environment. Declared {@code final}: it is assigned exactly once and
     * must never be reassigned by subclasses.
     */
    static final OllamaService OLLAMA = hasEnvironmentConfiguration()
            ? null
            : OllamaServiceFactory.createSingletonService();

    @Override
    protected void setupResources() throws Exception {
        super.setupResources();

        if (OLLAMA != null) {
            // Use Ollama service
            baseUrl = OLLAMA.baseUrlV1();
            model = OLLAMA.modelName();
            apiKey = "dummy"; // Ollama doesn't require API key
        } else {
            // Use environment variables.
            // NOTE(review): OPENAI_BASE_URL and OPENAI_MODEL may be unset,
            // leaving baseUrl/model null; subclasses interpolate them into
            // endpoint URIs, so confirm the component tolerates the literal
            // string "null" or document these variables as required.
            apiKey = System.getenv("OPENAI_API_KEY");
            baseUrl = System.getenv("OPENAI_BASE_URL"); // Optional
            model = System.getenv("OPENAI_MODEL"); // Optional
        }
    }

    /**
     * Returns {@code true} when a non-blank {@code OPENAI_API_KEY} is present
     * in the environment, i.e. the tests should target a real endpoint
     * instead of spinning up the Ollama container.
     */
    protected static boolean hasEnvironmentConfiguration() {
        String apiKey = System.getenv("OPENAI_API_KEY");
        return apiKey != null && !apiKey.trim().isEmpty();
    }
}
17 changes: 17 additions & 0 deletions components/camel-ai/camel-openai/test_execution.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,17 @@
## Test execution

### macOS or Linux without an NVIDIA graphics card
If Ollama is already installed on the system, execute the tests with:

```bash
mvn verify -Dollama.endpoint=http://localhost:11434/ -Dollama.model=granite4:3b -Dollama.instance.type=remote
```

The Ollama Docker image is very slow on a MacBook without NVIDIA hardware acceleration.

### Linux with an NVIDIA graphics card
Hardware acceleration can be used; execute the tests with:

```bash
mvn verify -Dollama.container.enable.gpu=enabled
```