This is an automated email from the ASF dual-hosted git repository.

fmariani pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel.git


The following commit(s) were added to refs/heads/main by this push:
     new f5afffa046ec Update langchain4j agent test execution with qwen3-vl
f5afffa046ec is described below

commit f5afffa046eca963400f0dccdd086461adc1c3b2
Author: Croway <[email protected]>
AuthorDate: Thu Jan 29 19:15:38 2026 +0100

    Update langchain4j agent test execution with qwen3-vl
---
 .../integration/LangChain4jAgentWrappedFileIT.java |  2 ++
 .../langchain4j/agent/integration/ModelHelper.java |  8 +++----
 .../camel-langchain4j-agent/test-execution.md      | 28 +++++++++++++---------
 3 files changed, 23 insertions(+), 15 deletions(-)

diff --git a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
index 70099319e9d0..994675473156 100644
--- a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
+++ b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
@@ -30,6 +30,7 @@ import org.apache.camel.test.infra.ollama.services.OllamaService;
 import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
 import org.apache.camel.test.infra.ollama.services.OpenAIService;
 import org.apache.camel.test.junit5.CamelTestSupport;
+import org.junit.jupiter.api.Disabled;
 import org.junit.jupiter.api.Test;
 import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
 import org.junit.jupiter.api.extension.RegisterExtension;
@@ -104,6 +105,7 @@ public class LangChain4jAgentWrappedFileIT extends CamelTestSupport {
      * agent.
      */
     @Test
+    @Disabled("Only few models support PDF")
     void testPdfFileFromFileComponent() throws Exception {
         // Start only the PDF route
         context.getRouteController().startRoute(PDF_ROUTE_ID);
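
As context for the change above: `@Disabled` skips the PDF test unconditionally, while the class already imports system-property conditions (see `DisabledIfSystemProperty`), so an opt-in gate would also be possible. A minimal sketch, not part of this commit, using a hypothetical property name:

```java
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.EnabledIfSystemProperty;

class PdfGatingSketch {

    @Test
    @Disabled("Only few models support PDF")
    void skippedUnconditionally() {
        // JUnit 5 reports this test as skipped until the annotation is removed
    }

    @Test
    @EnabledIfSystemProperty(named = "model.supports.pdf", matches = "true")
    void runsOnlyWhenOptedIn() {
        // hypothetical opt-in flag: run with -Dmodel.supports.pdf=true
    }
}
```
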
diff --git a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
index 325607efc63e..d183074f61aa 100644
--- a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
+++ b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
@@ -55,7 +55,7 @@ public class ModelHelper {
         OpenAiChatModel.OpenAiChatModelBuilder builder = OpenAiChatModel.builder()
                 .apiKey(apiKey)
                 .temperature(1.0)
-                .timeout(ofSeconds(60))
+                .timeout(ofSeconds(120))
                 .logRequests(true)
                 .logResponses(true);
 
@@ -95,7 +95,7 @@ public class ModelHelper {
                 .baseUrl(baseUrl)
                 .modelName(modelName)
                 .temperature(1.0)
-                .timeout(ofSeconds(60))
+                .timeout(ofSeconds(120))
                 .logRequests(true)
                 .logResponses(true)
                 .build();
@@ -180,7 +180,7 @@ public class ModelHelper {
                     .baseUrl(openaiService.baseUrl())
                     .modelName(openaiService.modelName())
                     .temperature(1.0)
-                    .timeout(ofSeconds(60))
+                    .timeout(ofSeconds(120))
                     .logRequests(true)
                     .logResponses(true)
                     .build();
@@ -191,7 +191,7 @@ public class ModelHelper {
                 .baseUrl(ollamaService.baseUrl())
                 .modelName(ollamaService.modelName())
                 .temperature(0.3)
-                .timeout(ofSeconds(60))
+                .timeout(ofSeconds(120))
                 .build();
     }
 
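Pulled together from the hunks above, a minimal sketch of the langchain4j builder with the raised 120-second timeout; this is not a verbatim copy of `ModelHelper`, and the endpoint and model name below are placeholders rather than values from the commit:

```java
import static java.time.Duration.ofSeconds;

import dev.langchain4j.model.openai.OpenAiChatModel;

public class ChatModelSketch {

    public static OpenAiChatModel openAiCompatibleModel(String apiKey) {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .baseUrl("http://localhost:11434/v1/") // placeholder OpenAI-compatible endpoint
                .modelName("qwen3-vl:8b")              // model referenced in the docs update below
                .temperature(1.0)
                .timeout(ofSeconds(120))               // raised from 60s to tolerate slower local models
                .logRequests(true)
                .logResponses(true)
                .build();
    }
}
```
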
diff --git a/components/camel-ai/camel-langchain4j-agent/test-execution.md b/components/camel-ai/camel-langchain4j-agent/test-execution.md
index 5c8c971aebcb..0b7e925e4d77 100644
--- a/components/camel-ai/camel-langchain4j-agent/test-execution.md
+++ b/components/camel-ai/camel-langchain4j-agent/test-execution.md
@@ -1,22 +1,22 @@
-## Test execution
+## Test Execution
 
-### MacOS or Linux without nvidia graphic card
-If ollama is already installed on the system execute the test with
+### macOS or Linux Without an NVIDIA Graphics Card
+If Ollama is already installed on the system, execute the tests with:
 
 ```bash
 mvn verify -Dollama.endpoint=http://localhost:11434/ -Dollama.model=granite4:3b -Dollama.instance.type=remote
 ```
 
-The Ollama docker image is really slow on macbook without nvidia hardware acceleration
+The Ollama Docker image is very slow on a MacBook without NVIDIA hardware acceleration.
 
-### Linux with Nvidia graphic card
-The hardware acceleration can be used, and the test can be executed with
+### Linux With an NVIDIA Graphics Card
+Hardware acceleration can be used, and the tests can be executed with:
 
 ```bash
 mvn verify -Dollama.container.enable.gpu=enabled
 ```
 
-### OpenAI or OpenAI-compatible endpoints
+### OpenAI or OpenAI-Compatible Endpoints
 To run tests against OpenAI or any OpenAI-compatible endpoint (including local Ollama via its OpenAI-compatible API):
 
 ```bash
@@ -32,7 +32,13 @@ mvn verify -Dollama.instance.type=openai \
     -Dopenai.api.key=dummy
 ```
 
-Available OpenAI properties:
-- `openai.api.key` - API key (required for real OpenAI, use "dummy" for Ollama)
-- `openai.endpoint` - Base URL (defaults to `https://api.openai.com/v1/`)
-- `openai.model` - Model name (defaults to `gpt-4o-mini`)
\ No newline at end of file
+Note that `LangChain4jAgentWrappedFileIT` and `LangChain4jAgentMultimodalityIT` require multimodal models.
+
+All tests (tools and multimodal) can be executed with an OpenAI instance and the model `qwen3-vl:8b`:
+
+```bash
+mvn verify -Dollama.instance.type=openai \
+    -Dopenai.endpoint=http://localhost:11434/v1/ \
+    -Dopenai.model=qwen3-vl:8b \
+    -Dopenai.api.key=dummy
+```
\ No newline at end of file
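
The removed bullet list above documented the OpenAI-related properties (`openai.api.key`, `openai.endpoint`, `openai.model`) and their defaults. As a rough illustration only, a hypothetical helper (not the component's actual code) could resolve them like this:

```java
// Hypothetical sketch: property names and defaults taken from the removed list above.
public class OpenAiTestConfig {

    static String apiKey() {
        // "dummy" is accepted when targeting Ollama's OpenAI-compatible endpoint
        return System.getProperty("openai.api.key", "dummy");
    }

    static String endpoint() {
        return System.getProperty("openai.endpoint", "https://api.openai.com/v1/");
    }

    static String model() {
        return System.getProperty("openai.model", "gpt-4o-mini");
    }
}
```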
