This is an automated email from the ASF dual-hosted git repository.
fmariani pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel.git
The following commit(s) were added to refs/heads/main by this push:
new d1be42ba9dba Add openai support to ollama test infra
d1be42ba9dba is described below
commit d1be42ba9dbad0cfdeee8ee550fb4713c7dfa0f0
Author: Croway <[email protected]>
AuthorDate: Thu Jan 29 16:34:51 2026 +0100
Add openai support to ollama test infra
---
.../camel-ai/camel-langchain4j-agent/pom.xml | 6 ++
.../agent/integration/AbstractRAGIT.java | 12 +--
.../integration/LangChain4jAgentCustomToolsIT.java | 8 +-
.../LangChain4jAgentGuardrailsIntegrationIT.java | 8 +-
.../integration/LangChain4jAgentMcpToolsIT.java | 8 +-
.../integration/LangChain4jAgentMixedToolsIT.java | 8 +-
.../LangChain4jAgentMultimodalityIT.java | 21 ++--
.../integration/LangChain4jAgentWithMemoryIT.java | 8 +-
.../integration/LangChain4jAgentWithToolsIT.java | 8 +-
.../integration/LangChain4jAgentWrappedFileIT.java | 34 +++---
.../integration/LangChain4jSimpleAgentIT.java | 8 +-
.../langchain4j/agent/integration/ModelHelper.java | 46 ++++----
.../camel-langchain4j-agent/test-execution.md | 25 ++++-
components/camel-ai/camel-langchain4j-chat/pom.xml | 6 ++
.../langchain4j/chat/OllamaTestSupport.java | 13 +++
.../camel-langchain4j-chat/test-execution.md | 25 ++++-
.../camel-langchain4j-tools/test-execution.md | 25 ++++-
components/camel-ai/camel-openai/pom.xml | 6 ++
.../openai/integration/OpenAITestSupport.java | 25 ++---
.../infra/ollama/commons/OllamaProperties.java | 5 +
.../infra/ollama/services/OpenAIInfraService.java | 117 +++++++++++++++++++++
.../ollama/services/OllamaServiceFactory.java | 5 +-
.../test/infra/ollama/services/OpenAIService.java | 31 ++++++
23 files changed, 352 insertions(+), 106 deletions(-)
diff --git a/components/camel-ai/camel-langchain4j-agent/pom.xml
b/components/camel-ai/camel-langchain4j-agent/pom.xml
index 0f84271be1ca..2336ee7ea209 100644
--- a/components/camel-ai/camel-langchain4j-agent/pom.xml
+++ b/components/camel-ai/camel-langchain4j-agent/pom.xml
@@ -103,6 +103,12 @@
<artifactId>assertj-core</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-infra-ollama</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-test-infra-ollama</artifactId>
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/AbstractRAGIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/AbstractRAGIT.java
index af2698e6cc35..27d24022f13b 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/AbstractRAGIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/AbstractRAGIT.java
@@ -31,19 +31,19 @@ import
dev.langchain4j.store.embedding.inmemory.InMemoryEmbeddingStore;
import org.apache.camel.component.langchain4j.agent.BaseLangChain4jAgent;
import org.apache.camel.test.infra.ollama.services.OllamaService;
import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.junit.jupiter.api.extension.RegisterExtension;
public abstract class AbstractRAGIT extends BaseLangChain4jAgent {
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
// Setup components
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
retrievalAugmentor = createRetrievalAugmentor();
}
@@ -55,9 +55,7 @@ public abstract class AbstractRAGIT extends
BaseLangChain4jAgent {
List<TextSegment> segments = DocumentSplitters.recursive(300,
100).split(document);
// Create embeddings
- EmbeddingModel embeddingModel = OLLAMA != null
- ? ModelHelper.loadEmbeddingModel(OLLAMA)
- : ModelHelper.createEmbeddingModel();
+ EmbeddingModel embeddingModel = ModelHelper.loadEmbeddingModel(OLLAMA);
List<Embedding> embeddings =
embeddingModel.embedAll(segments).content();
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentCustomToolsIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentCustomToolsIT.java
index 43e72c96c0a8..b7dde7d41540 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentCustomToolsIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentCustomToolsIT.java
@@ -33,6 +33,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -49,14 +50,13 @@ public class LangChain4jAgentCustomToolsIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@Test
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentGuardrailsIntegrationIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentGuardrailsIntegrationIT.java
index 75921509abea..70f975f4bc57 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentGuardrailsIntegrationIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentGuardrailsIntegrationIT.java
@@ -33,6 +33,7 @@ import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
@@ -44,14 +45,13 @@ public class LangChain4jAgentGuardrailsIntegrationIT
extends CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@BeforeEach
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMcpToolsIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMcpToolsIT.java
index eaec43962cfa..163c046169e2 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMcpToolsIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMcpToolsIT.java
@@ -38,6 +38,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import org.junit.jupiter.api.io.TempDir;
import static org.junit.jupiter.api.Assertions.assertFalse;
@@ -63,14 +64,13 @@ public class LangChain4jAgentMcpToolsIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
// Initialize tempDirPath - use toRealPath() to resolve symlinks
(e.g., /var -> /private/var on macOS)
// This is needed because the MCP filesystem server resolves symlinks
internally
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMixedToolsIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMixedToolsIT.java
index 3ce1841e466d..f12945fd8d94 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMixedToolsIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMixedToolsIT.java
@@ -32,6 +32,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -52,14 +53,13 @@ public class LangChain4jAgentMixedToolsIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@Test
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMultimodalityIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMultimodalityIT.java
index 1deb09da84af..9715236d4b2e 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMultimodalityIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentMultimodalityIT.java
@@ -32,10 +32,12 @@ import
org.apache.camel.component.langchain4j.agent.api.AiAgentBody;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.infra.ollama.services.OllamaService;
import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.apache.camel.test.infra.ollama.services.OpenAIService;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -52,25 +54,26 @@ public class LangChain4jAgentMultimodalityIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@BeforeEach
void skipIfOllama() {
- boolean isOllama = OLLAMA != null ||
"ollama".equals(System.getenv(ModelHelper.MODEL_PROVIDER));
- assumeFalse(isOllama,
+ // Skip if not using OpenAI - Ollama doesn't support multimodal content
+ boolean isOpenAI = OLLAMA instanceof OpenAIService;
+ assumeFalse(!isOpenAI,
"Skipping multimodality tests with Ollama: LangChain4j's
Ollama provider does not support " +
- "multiple content blocks in a single
UserMessage. The provider's InternalOllamaHelper.toText() " +
- "calls UserMessage.singleText() which requires
exactly one TextContent. " +
- "Use OpenAI or Gemini providers for multimodal
content testing.");
+ "multiple content blocks in a single
UserMessage. The provider's InternalOllamaHelper.toText() "
+ +
+ "calls UserMessage.singleText() which requires
exactly one TextContent. " +
+ "Use OpenAI or Gemini providers for multimodal
content testing.");
}
/**
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithMemoryIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithMemoryIT.java
index 281c02228a98..667f997b7e24 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithMemoryIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithMemoryIT.java
@@ -34,6 +34,7 @@ import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static
org.apache.camel.component.langchain4j.agent.api.Headers.MEMORY_ID;
import static
org.apache.camel.component.langchain4j.agent.api.Headers.SYSTEM_MESSAGE;
@@ -52,15 +53,14 @@ public class LangChain4jAgentWithMemoryIT extends
CamelTestSupport {
protected ChatMemoryProvider chatMemoryProvider;
private PersistentChatMemoryStore store;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
store = new PersistentChatMemoryStore();
chatMemoryProvider = createMemoryProvider();
}
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithToolsIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithToolsIT.java
index 637fa752889b..81d4b8e4017f 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithToolsIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWithToolsIT.java
@@ -30,6 +30,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
@@ -44,15 +45,14 @@ public class LangChain4jAgentWithToolsIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@Test
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
index bbe6c190a9d3..70099319e9d0 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jAgentWrappedFileIT.java
@@ -26,25 +26,27 @@ import
org.apache.camel.component.langchain4j.agent.api.AgentConfiguration;
import org.apache.camel.component.langchain4j.agent.api.AgentWithoutMemory;
import org.apache.camel.component.langchain4j.agent.api.Headers;
import org.apache.camel.component.mock.MockEndpoint;
+import org.apache.camel.test.infra.ollama.services.OllamaService;
+import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.apache.camel.test.infra.ollama.services.OpenAIService;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
-import static org.junit.jupiter.api.Assumptions.assumeFalse;
import static org.junit.jupiter.api.Assumptions.assumeTrue;
/**
* Integration tests for WrappedFile support in the LangChain4j Agent
component. Tests the ability to process files from
* the file component directly, with automatic Content type conversion based
on MIME type.
* <p>
- * Requires environment variables to be set:
+ * This test requires OpenAI or a compatible multimodal provider. Configure
via:
* <ul>
- * <li>API_KEY - The API key for the LLM provider</li>
- * <li>MODEL_PROVIDER - The provider name (e.g., "openai", "gemini",
"ollama")</li>
- * <li>MODEL_BASE_URL - (Optional) Custom base URL for OpenAI-compatible
endpoints</li>
- * <li>MODEL_NAME - (Optional) Custom model name</li>
+ * <li>{@code -Dollama.instance.type=openai}</li>
+ * <li>{@code -Dopenai.api.key=sk-xxx}</li>
+ * <li>{@code -Dopenai.model=gpt-4o-mini} (optional)</li>
* </ul>
*/
@DisabledIfSystemProperty(named = "ci.env.name", matches = ".*",
disabledReason = "Requires too much network resources")
@@ -53,6 +55,9 @@ public class LangChain4jAgentWrappedFileIT extends
CamelTestSupport {
private static final String IMAGE_ROUTE_ID = "image-route";
private static final String PDF_ROUTE_ID = "pdf-route";
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
+
protected ChatModel chatModel;
private String resourcesPath;
@@ -60,19 +65,12 @@ public class LangChain4jAgentWrappedFileIT extends
CamelTestSupport {
protected void setupResources() throws Exception {
super.setupResources();
- // Skip if no environment configuration - this test requires external
API providers
- assumeTrue(ModelHelper.hasEnvironmentConfiguration(),
- "Skipping: This test requires environment variables: API_KEY,
MODEL_PROVIDER. " +
- "Optionally:
MODEL_BASE_URL, MODEL_NAME");
-
- // Skip if using Ollama - it doesn't support multimodal content
- assumeFalse("ollama".equals(System.getenv(ModelHelper.MODEL_PROVIDER)),
- "Skipping wrapped file tests with Ollama: LangChain4j's Ollama
provider does not support " +
-
"multimodal content (images, PDFs). The provider's InternalOllamaHelper
requires "
-
+
-
"single text content in UserMessage. Use OpenAI or Gemini providers for file
processing tests.");
+ // Skip if not using OpenAI - Ollama doesn't support multimodal
content (images, PDFs)
+ assumeTrue(OLLAMA instanceof OpenAIService,
+ "Skipping wrapped file tests: This test requires OpenAI for
multimodal content support. " +
+ "Use
-Dollama.instance.type=openai -Dopenai.api.key=sk-xxx");
- chatModel = ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
// Get the path to test resources
URL resourceUrl =
getClass().getClassLoader().getResource("camel-logo.png");
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jSimpleAgentIT.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jSimpleAgentIT.java
index fead99f99156..76c8a6f038bc 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jSimpleAgentIT.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/LangChain4jSimpleAgentIT.java
@@ -30,6 +30,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.condition.DisabledIfSystemProperty;
+import org.junit.jupiter.api.extension.RegisterExtension;
import static
org.apache.camel.component.langchain4j.agent.api.Headers.SYSTEM_MESSAGE;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
@@ -50,15 +51,14 @@ public class LangChain4jSimpleAgentIT extends
CamelTestSupport {
protected ChatModel chatModel;
- static OllamaService OLLAMA = ModelHelper.hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- chatModel = OLLAMA != null ? ModelHelper.loadChatModel(OLLAMA) :
ModelHelper.loadFromEnv();
+ chatModel = ModelHelper.loadChatModel(OLLAMA);
}
@Test
diff --git
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
index 04379b647141..325607efc63e 100644
---
a/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
+++
b/components/camel-ai/camel-langchain4j-agent/src/test/java/org/apache/camel/component/langchain4j/agent/integration/ModelHelper.java
@@ -28,6 +28,7 @@ import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiChatModelName;
import dev.langchain4j.model.openai.OpenAiEmbeddingModel;
import org.apache.camel.test.infra.ollama.services.OllamaService;
+import org.apache.camel.test.infra.ollama.services.OpenAIService;
import static java.time.Duration.ofSeconds;
@@ -168,20 +169,24 @@ public class ModelHelper {
}
/**
- * Load chat model from environment variables if configured, otherwise use
OllamaService. This allows tests to run
- * without requiring external API keys.
+ * Load chat model from OllamaService. Detects if the service is
OpenAIService and creates the appropriate model
+ * type.
*/
public static ChatModel loadChatModel(OllamaService ollamaService) {
- var apiKey = System.getenv(API_KEY);
- var modelProvider = System.getenv(MODEL_PROVIDER);
-
- if (apiKey != null && !apiKey.trim().isEmpty()
- && modelProvider != null && !modelProvider.trim().isEmpty()) {
- // Use environment-configured model
- return loadFromEnv();
+ // Detect OpenAI service and create OpenAI model
+ if (ollamaService instanceof OpenAIService openaiService) {
+ return OpenAiChatModel.builder()
+ .apiKey(openaiService.apiKey())
+ .baseUrl(openaiService.baseUrl())
+ .modelName(openaiService.modelName())
+ .temperature(1.0)
+ .timeout(ofSeconds(60))
+ .logRequests(true)
+ .logResponses(true)
+ .build();
}
- // Fallback to Ollama service
+ // Standard Ollama model
return OllamaChatModel.builder()
.baseUrl(ollamaService.baseUrl())
.modelName(ollamaService.modelName())
@@ -191,20 +196,21 @@ public class ModelHelper {
}
/**
- * Load embedding model from environment variables if configured,
otherwise use OllamaService. This allows tests to
- * run without requiring external API keys.
+ * Load embedding model from OllamaService. Detects if the service is
OpenAIService and creates the appropriate
+ * model type.
*/
public static EmbeddingModel loadEmbeddingModel(OllamaService
ollamaService) {
- var apiKey = System.getenv(API_KEY);
- var modelProvider = System.getenv(MODEL_PROVIDER);
-
- if (apiKey != null && !apiKey.trim().isEmpty()
- && modelProvider != null && !modelProvider.trim().isEmpty()) {
- // Use environment-configured embedding model
- return createEmbeddingModel();
+ // Detect OpenAI service and create OpenAI embedding model
+ if (ollamaService instanceof OpenAIService openaiService) {
+ return OpenAiEmbeddingModel.builder()
+ .apiKey(openaiService.apiKey())
+ .baseUrl(openaiService.baseUrl())
+ .modelName("granite-embedding")
+ .timeout(ofSeconds(30))
+ .build();
}
- // Fallback to Ollama service
+ // Standard Ollama embedding model
return OllamaEmbeddingModel.builder()
.baseUrl(ollamaService.baseUrl())
.modelName(ollamaService.modelName())
diff --git a/components/camel-ai/camel-langchain4j-agent/test-execution.md
b/components/camel-ai/camel-langchain4j-agent/test-execution.md
index ad514ecc889c..5c8c971aebcb 100644
--- a/components/camel-ai/camel-langchain4j-agent/test-execution.md
+++ b/components/camel-ai/camel-langchain4j-agent/test-execution.md
@@ -1,7 +1,7 @@
## Test execution
### MacOS or Linux without nvidia graphic card
-If ollama is already installed on the system execute the test with
+If ollama is already installed on the system, execute the test with
```bash
mvn verify -Dollama.endpoint=http://localhost:11434/
-Dollama.model=granite4:3b -Dollama.instance.type=remote
@@ -14,4 +14,25 @@ The hardware acceleration can be used, and the test can be
executed with
```bash
mvn verify -Dollama.container.enable.gpu=enabled
-```
\ No newline at end of file
+```
+
+### OpenAI or OpenAI-compatible endpoints
+To run tests against OpenAI or any OpenAI-compatible endpoint (including local
Ollama via its OpenAI-compatible API):
+
+```bash
+# Using real OpenAI
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.api.key=sk-your-api-key \
+ -Dopenai.model=gpt-4o-mini
+
+# Using local Ollama as OpenAI-compatible endpoint
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.endpoint=http://localhost:11434/v1/ \
+ -Dopenai.model=granite4:3b \
+ -Dopenai.api.key=dummy
+```
+
+Available OpenAI properties:
+- `openai.api.key` - API key (required for real OpenAI, use "dummy" for Ollama)
+- `openai.endpoint` - Base URL (defaults to `https://api.openai.com/v1/`)
+- `openai.model` - Model name (defaults to `gpt-4o-mini`)
\ No newline at end of file
diff --git a/components/camel-ai/camel-langchain4j-chat/pom.xml
b/components/camel-ai/camel-langchain4j-chat/pom.xml
index 01476788701a..36dfae115a5f 100644
--- a/components/camel-ai/camel-langchain4j-chat/pom.xml
+++ b/components/camel-ai/camel-langchain4j-chat/pom.xml
@@ -75,6 +75,12 @@
<version>${langchain4j-version}</version>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-infra-ollama</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-test-infra-ollama</artifactId>
diff --git
a/components/camel-ai/camel-langchain4j-chat/src/test/java/org/apache/camel/component/langchain4j/chat/OllamaTestSupport.java
b/components/camel-ai/camel-langchain4j-chat/src/test/java/org/apache/camel/component/langchain4j/chat/OllamaTestSupport.java
index 64467528c587..7824a084d7df 100644
---
a/components/camel-ai/camel-langchain4j-chat/src/test/java/org/apache/camel/component/langchain4j/chat/OllamaTestSupport.java
+++
b/components/camel-ai/camel-langchain4j-chat/src/test/java/org/apache/camel/component/langchain4j/chat/OllamaTestSupport.java
@@ -18,8 +18,10 @@ package org.apache.camel.component.langchain4j.chat;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
+import dev.langchain4j.model.openai.OpenAiChatModel;
import org.apache.camel.test.infra.ollama.services.OllamaService;
import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
+import org.apache.camel.test.infra.ollama.services.OpenAIService;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.junit.jupiter.api.extension.RegisterExtension;
@@ -40,6 +42,17 @@ public class OllamaTestSupport extends CamelTestSupport {
}
protected ChatModel createModel() {
+ if (OLLAMA instanceof OpenAIService) {
+ return OpenAiChatModel.builder()
+ .apiKey(OLLAMA.apiKey())
+ .baseUrl(OLLAMA.baseUrl())
+ .modelName(OLLAMA.modelName())
+ .temperature(0.3)
+ .timeout(ofSeconds(60))
+ .logRequests(true)
+ .logResponses(true)
+ .build();
+ }
return OllamaChatModel.builder()
.baseUrl(OLLAMA.baseUrl())
.modelName(OLLAMA.modelName())
diff --git a/components/camel-ai/camel-langchain4j-chat/test-execution.md
b/components/camel-ai/camel-langchain4j-chat/test-execution.md
index 79033b6f7942..75f36c07b573 100644
--- a/components/camel-ai/camel-langchain4j-chat/test-execution.md
+++ b/components/camel-ai/camel-langchain4j-chat/test-execution.md
@@ -1,7 +1,7 @@
## Test execution
### MacOS or Linux without nvidia graphic card
-If ollama is already installed on the system execute the test with
+If ollama is already installed on the system, execute the test with
```bash
mvn verify -Dollama.endpoint=http://localhost:11434/
-Dollama.model=granite4:tiny-h -Dollama.instance.type=remote
@@ -14,4 +14,25 @@ The hardware acceleration can be used, and the test can be
executed with
```bash
mvn verify -Dollama.container.enable.gpu=enabled
-```
\ No newline at end of file
+```
+
+### OpenAI or OpenAI-compatible endpoints
+To run tests against OpenAI or any OpenAI-compatible endpoint (including local
Ollama via its OpenAI-compatible API):
+
+```bash
+# Using real OpenAI
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.api.key=sk-your-api-key \
+ -Dopenai.model=gpt-4o-mini
+
+# Using local Ollama as OpenAI-compatible endpoint
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.endpoint=http://localhost:11434/v1/ \
+ -Dopenai.model=granite4:tiny-h \
+ -Dopenai.api.key=dummy
+```
+
+Available OpenAI properties:
+- `openai.api.key` - API key, or `OPENAI_API_KEY` env var (required for real OpenAI, use "dummy" for Ollama)
+- `openai.endpoint` - Base URL, or `OPENAI_BASE_URL` env var (defaults to `https://api.openai.com/v1/`)
+- `openai.model` - Model name, or `OPENAI_MODEL` env var (defaults to `gpt-4o-mini`)
\ No newline at end of file
diff --git a/components/camel-ai/camel-langchain4j-tools/test-execution.md
b/components/camel-ai/camel-langchain4j-tools/test-execution.md
index 4402c014b932..0a8a6042b57a 100644
--- a/components/camel-ai/camel-langchain4j-tools/test-execution.md
+++ b/components/camel-ai/camel-langchain4j-tools/test-execution.md
@@ -1,7 +1,7 @@
## Test execution
### MacOS or Linux without nvidia graphic card
-If ollama is already installed on the system execute the test with
+If ollama is already installed on the system execute the test with
```bash
mvn verify -Dollama.endpoint=http://localhost:11434/
-Dollama.model=granite4:tiny-h -Dollama.instance.type=remote
@@ -14,4 +14,25 @@ The hardware acceleration can be used, and the test can be
executed with
```bash
mvn verify -Dollama.container.enable.gpu=enabled
-```
\ No newline at end of file
+```
+
+### OpenAI or OpenAI-compatible endpoints
+To run tests against OpenAI or any OpenAI-compatible endpoint (including local
Ollama via its OpenAI-compatible API):
+
+```bash
+# Using real OpenAI
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.api.key=sk-your-api-key \
+ -Dopenai.model=gpt-4o-mini
+
+# Using local Ollama as OpenAI-compatible endpoint
+mvn verify -Dollama.instance.type=openai \
+ -Dopenai.endpoint=http://localhost:11434/v1/ \
+ -Dopenai.model=granite4:tiny-h \
+ -Dopenai.api.key=dummy
+```
+
+Available OpenAI properties:
+- `openai.api.key` - API key, or `OPENAI_API_KEY` env var (required for real OpenAI, use "dummy" for Ollama)
+- `openai.endpoint` - Base URL, or `OPENAI_BASE_URL` env var (defaults to `https://api.openai.com/v1/`)
+- `openai.model` - Model name, or `OPENAI_MODEL` env var (defaults to `gpt-4o-mini`)
\ No newline at end of file
diff --git a/components/camel-ai/camel-openai/pom.xml
b/components/camel-ai/camel-openai/pom.xml
index 36a1edfa6330..6af7b3c3446e 100644
--- a/components/camel-ai/camel-openai/pom.xml
+++ b/components/camel-ai/camel-openai/pom.xml
@@ -70,6 +70,12 @@
<artifactId>camel-jackson</artifactId>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-infra-ollama</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-test-infra-ollama</artifactId>
diff --git
a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
index 72f92d33ade7..a27db639b9ee 100644
---
a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
+++
b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/integration/OpenAITestSupport.java
@@ -22,6 +22,7 @@ import
org.apache.camel.test.infra.ollama.services.OllamaService;
import org.apache.camel.test.infra.ollama.services.OllamaServiceFactory;
import org.apache.camel.test.junit5.CamelTestSupport;
import org.apache.camel.util.ObjectHelper;
+import org.junit.jupiter.api.extension.RegisterExtension;
public class OpenAITestSupport extends CamelTestSupport {
@@ -29,24 +30,18 @@ public class OpenAITestSupport extends CamelTestSupport {
protected String baseUrl;
protected String model;
- static OllamaService OLLAMA = hasEnvironmentConfiguration()
- ? null
- : OllamaServiceFactory.createSingletonService();
+ @RegisterExtension
+ static OllamaService OLLAMA =
OllamaServiceFactory.createSingletonService();
@Override
protected void setupResources() throws Exception {
super.setupResources();
- if (OLLAMA != null) {
- // Use Ollama service
- baseUrl = OLLAMA.baseUrlV1();
- model = OLLAMA.modelName();
- apiKey = "dummy"; // Ollama doesn't require API key
- } else {
- // Use environment variables
- apiKey = System.getenv("OPENAI_API_KEY");
- baseUrl = System.getenv("OPENAI_BASE_URL"); // Optional
- model = System.getenv("OPENAI_MODEL"); // Optional
+ baseUrl = OLLAMA.baseUrlV1();
+ model = OLLAMA.modelName();
+ apiKey = OLLAMA.apiKey();
+ if (apiKey == null || apiKey.isEmpty()) {
+ apiKey = "dummy";
}
}
@@ -70,8 +65,4 @@ public class OpenAITestSupport extends CamelTestSupport {
return camelContext;
}
- protected static boolean hasEnvironmentConfiguration() {
- String apiKey = System.getenv("OPENAI_API_KEY");
- return apiKey != null && !apiKey.trim().isEmpty();
- }
}
diff --git
a/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/commons/OllamaProperties.java
b/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/commons/OllamaProperties.java
index 7b31c706de1d..a5f1b109e117 100644
---
a/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/commons/OllamaProperties.java
+++
b/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/commons/OllamaProperties.java
@@ -37,6 +37,11 @@ public class OllamaProperties {
public static final String MEMORY_LIMIT = "ollama.container.memory.limit";
public static final String ENABLE_GPU = "ollama.container.enable.gpu";
+ // OpenAI-specific properties (used when ollama.instance.type=openai)
+ public static final String OPENAI_API_KEY = "openai.api.key";
+ public static final String OPENAI_ENDPOINT = "openai.endpoint";
+ public static final String OPENAI_MODEL = "openai.model";
+
private OllamaProperties() {
}
diff --git
a/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/services/OpenAIInfraService.java
b/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/services/OpenAIInfraService.java
new file mode 100644
index 000000000000..6e68e2413bdc
--- /dev/null
+++
b/test-infra/camel-test-infra-ollama/src/main/java/org/apache/camel/test/infra/ollama/services/OpenAIInfraService.java
@@ -0,0 +1,117 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.test.infra.ollama.services;
+
+import org.apache.camel.test.infra.ollama.commons.OllamaProperties;
+
+/**
+ * OllamaInfraService implementation for OpenAI and OpenAI-compatible
endpoints.
+ *
+ * Usage example:
+ *
+ * <pre>
+ * mvn verify -Dollama.instance.type=openai \
+ * -Dopenai.api.key=sk-xxx \
+ * -Dopenai.model=gpt-4o-mini
+ * </pre>
+ */
+public class OpenAIInfraService implements OllamaInfraService {
+
+ private static final String DEFAULT_BASE_URL =
"https://api.openai.com/v1/";
+ private static final String DEFAULT_MODEL_NAME = "gpt-4o-mini";
+
+ // Environment variable names
+ private static final String ENV_OPENAI_API_KEY = "OPENAI_API_KEY";
+ private static final String ENV_OPENAI_BASE_URL = "OPENAI_BASE_URL";
+ private static final String ENV_OPENAI_MODEL = "OPENAI_MODEL";
+
+ @Override
+ public void registerProperties() {
+ // NO-OP
+ }
+
+ @Override
+ public void initialize() {
+ registerProperties();
+ }
+
+ @Override
+ public void shutdown() {
+ // NO-OP
+ }
+
+ @Override
+ public String getEndpoint() {
+ return baseUrl();
+ }
+
+ @Override
+ public String getModel() {
+ return modelName();
+ }
+
+ @Override
+ public String modelName() {
+ // First try openai.model system property
+ String sysProp = System.getProperty(OllamaProperties.OPENAI_MODEL);
+ if (sysProp != null && !sysProp.trim().isEmpty()) {
+ return sysProp;
+ }
+ // Then try OPENAI_MODEL environment variable
+ String envVar = System.getenv(ENV_OPENAI_MODEL);
+ if (envVar != null && !envVar.trim().isEmpty()) {
+ return envVar;
+ }
+ return DEFAULT_MODEL_NAME;
+ }
+
+ @Override
+ public String baseUrl() {
+ // First try openai.endpoint system property
+ String sysProp = System.getProperty(OllamaProperties.OPENAI_ENDPOINT);
+ if (sysProp != null && !sysProp.trim().isEmpty()) {
+ return sysProp;
+ }
+ // Then try OPENAI_BASE_URL environment variable
+ String envVar = System.getenv(ENV_OPENAI_BASE_URL);
+ if (envVar != null && !envVar.trim().isEmpty()) {
+ return envVar;
+ }
+ return DEFAULT_BASE_URL;
+ }
+
+ @Override
+ public String baseUrlV1() {
+ String url = baseUrl();
+ // OpenAI URLs typically already end with /v1/
+ if (url.endsWith("/v1/") || url.endsWith("/v1")) {
+ return url.endsWith("/") ? url.substring(0, url.length() - 1) :
url;
+ }
+ return url + (url.endsWith("/") ? "v1" : "/v1");
+ }
+
+ @Override
+ public String apiKey() {
+ // First try openai.api.key system property
+ String sysProp = System.getProperty(OllamaProperties.OPENAI_API_KEY);
+ if (sysProp != null && !sysProp.trim().isEmpty()) {
+ return sysProp;
+ }
+ // Then try OPENAI_API_KEY environment variable
+ return System.getenv(ENV_OPENAI_API_KEY);
+ }
+}
diff --git
a/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OllamaServiceFactory.java
b/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OllamaServiceFactory.java
index 5eb19c3dece6..24f13e118884 100644
---
a/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OllamaServiceFactory.java
+++
b/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OllamaServiceFactory.java
@@ -123,6 +123,7 @@ public final class OllamaServiceFactory {
return builder()
.addLocalMapping(OllamaServiceFactory::selectLocalService)
.addRemoteMapping(OllamaRemoteService::new)
+ .addMapping("openai", OpenAIService::new)
.build();
}
@@ -130,6 +131,7 @@ public final class OllamaServiceFactory {
return builder()
.addLocalMapping(() ->
selectLocalService(serviceConfiguration))
.addRemoteMapping(() -> new
OllamaRemoteService(serviceConfiguration))
+ .addMapping("openai", OpenAIService::new)
.build();
}
@@ -147,7 +149,8 @@ public final class OllamaServiceFactory {
SimpleTestServiceBuilder<OllamaService> instance = builder();
instance.addLocalMapping(() -> new
SingletonOllamaService(selectLocalService(), "ollama"))
- .addRemoteMapping(OllamaRemoteService::new);
+ .addRemoteMapping(OllamaRemoteService::new)
+ .addMapping("openai", OpenAIService::new);
INSTANCE = instance.build();
}
diff --git
a/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OpenAIService.java
b/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OpenAIService.java
new file mode 100644
index 000000000000..9fb289441019
--- /dev/null
+++
b/test-infra/camel-test-infra-ollama/src/test/java/org/apache/camel/test/infra/ollama/services/OpenAIService.java
@@ -0,0 +1,31 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.test.infra.ollama.services;
+
+/**
+ * Test service for OpenAI and OpenAI-compatible endpoints.
+ * <p>
+ * Use this service when testing with OpenAI API or compatible providers.
Configure via:
+ * <ul>
+ * <li>{@code -Dollama.instance.type=openai}</li>
+ * <li>{@code -Dopenai.api.key=sk-xxx}</li>
+ * <li>{@code -Dopenai.model=gpt-4o-mini} (optional, defaults to
gpt-4o-mini)</li>
+ * <li>{@code -Dopenai.endpoint=https://api.openai.com/v1/} (optional)</li>
+ * </ul>
+ */
+public class OpenAIService extends OpenAIInfraService implements OllamaService
{
+}