This is an automated email from the ASF dual-hosted git repository.
fmariani pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel.git
The following commit(s) were added to refs/heads/main by this push:
     new ab4a8f6300db Add a simple integration test + input validation robustness
ab4a8f6300db is described below
commit ab4a8f6300db774258c03483c30d0cbec2937ae7
Author: Croway <[email protected]>
AuthorDate: Wed Dec 17 15:13:51 2025 +0100
Add a simple integration test + input validation robustness
---
components/camel-ai/camel-openai/pom.xml | 16 +++++
.../camel/component/openai/OpenAIProducer.java | 14 ++--
.../openai/integration/OpenAIChatCompletionIT.java | 79 ++++++++++++++++++++++
.../openai/integration/OpenAITestSupport.java | 54 +++++++++++++++
components/camel-ai/camel-openai/test_execution.md | 17 +++++
5 files changed, 176 insertions(+), 4 deletions(-)
diff --git a/components/camel-ai/camel-openai/pom.xml b/components/camel-ai/camel-openai/pom.xml
index f63077ac32c3..149b9bc45325 100644
--- a/components/camel-ai/camel-openai/pom.xml
+++ b/components/camel-ai/camel-openai/pom.xml
@@ -33,6 +33,10 @@
<name>Camel :: AI :: OpenAI</name>
    <description>Camel OpenAI component for chat completion using OpenAI API</description>
+ <properties>
+ <failsafe.rerunFailingTestsCount>3</failsafe.rerunFailingTestsCount>
+ </properties>
+
<dependencies>
<dependency>
<groupId>org.apache.camel</groupId>
@@ -66,5 +70,17 @@
<artifactId>camel-jackson</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-infra-ollama</artifactId>
+ <version>${project.version}</version>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.assertj</groupId>
+ <artifactId>assertj-core</artifactId>
+ <scope>test</scope>
+ </dependency>
</dependencies>
</project>
diff --git a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
index 3ccd26bf5611..5cbe1350bbbc 100644
--- a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
+++ b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
@@ -206,7 +206,14 @@ public class OpenAIProducer extends DefaultAsyncProducer {
addConversationHistory(messages, in, config);
ChatCompletionMessageParam userMessage = buildUserMessage(in, config);
- messages.add(userMessage);
+ if (userMessage != null) {
+ messages.add(userMessage);
+ }
+
+ if (messages.isEmpty()) {
+ throw new IllegalArgumentException(
+                    "No input provided to LLM. At least one message (user, system, or developer) must be provided");
+ }
return messages;
}
@@ -243,9 +250,8 @@ public class OpenAIProducer extends DefaultAsyncProducer {
    private ChatCompletionMessageParam buildTextMessage(Message in, String userPrompt, OpenAIConfiguration config) {
        String prompt = userPrompt != null ? userPrompt : in.getBody(String.class);
- if (prompt == null || prompt.isEmpty()) {
- throw new IllegalArgumentException(
-                    "Message body or user message configuration must contain the prompt text");
+ if (prompt == null || prompt.trim().isEmpty()) {
+ return null;
}
return createTextMessage(prompt);
}
diff --git a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java
new file mode 100644
index 000000000000..194e66ca7354
--- /dev/null
+++ b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAIChatCompletionIT.java
@@ -0,0 +1,79 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai.integration;
+
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.component.mock.MockEndpoint;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+
+@DisabledIfSystemProperty(named = "ci.env.name", matches = ".*",
+                          disabledReason = "Requires too much network resources")
+public class OpenAIChatCompletionIT extends OpenAITestSupport {
+
+ @Override
+ protected RouteBuilder createRouteBuilder() {
+ return new RouteBuilder() {
+ @Override
+ public void configure() {
+ // Route for simple message test
+ from("direct:send-simple-message")
+                        .toF("openai:chat-completion?apiKey=%s&baseUrl=%s&model=%s", apiKey, baseUrl, model)
+ .to("mock:response");
+ }
+ };
+ }
+
+ @Test
+ public void testSendSimpleStringMessage() throws Exception {
+ // Setup mock endpoint expectations
+ MockEndpoint mockResponse = getMockEndpoint("mock:response");
+ mockResponse.expectedMessageCount(1);
+
+ // Send a test message to the OpenAI endpoint
+ String response = template.requestBody("direct:send-simple-message",
+ "What is Apache Camel?",
+ String.class);
+
+ // Verify the mock endpoint received the message
+ mockResponse.assertIsSatisfied();
+
+ // Verify response is not null and contains meaningful content
+ assertThat(response).isNotNull();
+ assertThat(response).isNotEmpty();
+ assertThat(response.length()).isGreaterThan(10);
+
+ assertThat(response).contains("Camel");
+ assertThat(response).contains("Apache");
+ assertThat(response).contains("integration");
+ }
+
+ @Test
+ public void testEmptyMessageThrowsException() {
+ // Verify that empty messages result in an IllegalArgumentException
+ Exception exception = assertThrows(Exception.class, () -> {
+            template.requestBody("direct:send-simple-message", "", String.class);
+ });
+
+        // Verify the exception is an IllegalArgumentException about empty input
+        assertThat(exception.getCause()).isInstanceOf(IllegalArgumentException.class);
+        assertThat(exception.getCause().getMessage()).contains("No input provided to LLM");
+ }
+}
diff --git a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
new file mode 100644
index 000000000000..0f4da0e4d0ac
--- /dev/null
+++ b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
@@ -0,0 +1,54 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai.integration;
+
+import org.apache.camel.test.infra.ollama.services.OllamaService;
+import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.apache.camel.test.junit5.CamelTestSupport;
+
+public class OpenAITestSupport extends CamelTestSupport {
+
+ protected String apiKey;
+ protected String baseUrl;
+ protected String model;
+
+ static OllamaService OLLAMA = hasEnvironmentConfiguration()
+ ? null
+ : OllamaServiceFactory.createSingletonService();
+
+ @Override
+ protected void setupResources() throws Exception {
+ super.setupResources();
+
+ if (OLLAMA != null) {
+ // Use Ollama service
+ baseUrl = OLLAMA.baseUrlV1();
+ model = OLLAMA.modelName();
+ apiKey = "dummy"; // Ollama doesn't require API key
+ } else {
+ // Use environment variables
+ apiKey = System.getenv("OPENAI_API_KEY");
+ baseUrl = System.getenv("OPENAI_BASE_URL"); // Optional
+ model = System.getenv("OPENAI_MODEL"); // Optional
+ }
+ }
+
+ protected static boolean hasEnvironmentConfiguration() {
+ String apiKey = System.getenv("OPENAI_API_KEY");
+ return apiKey != null && !apiKey.trim().isEmpty();
+ }
+}
diff --git a/components/camel-ai/camel-openai/test_execution.md b/components/camel-ai/camel-openai/test_execution.md
new file mode 100644
index 000000000000..65f8d672b93b
--- /dev/null
+++ b/components/camel-ai/camel-openai/test_execution.md
@@ -0,0 +1,17 @@
+## Test execution
+
+### macOS or Linux without an NVIDIA graphics card
+If Ollama is already installed on the system, execute the tests with:
+
+```bash
+mvn verify -Dollama.endpoint=http://localhost:11434/ -Dollama.model=granite4:3b -Dollama.instance.type=remote
+```
+
+The Ollama Docker image is really slow on a MacBook without NVIDIA hardware acceleration.
+
+### Linux with an NVIDIA graphics card
+Hardware acceleration can be used, and the tests can be executed with:
+
+```bash
+mvn verify -Dollama.container.enable.gpu=enabled
+```
+```
\ No newline at end of file