This is an automated email from the ASF dual-hosted git repository.
davsclaus pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/camel.git
The following commit(s) were added to refs/heads/main by this push:
new ac0b0e87f26a CAMEL-22661: Add a native OpenAI component (#19820)
ac0b0e87f26a is described below
commit ac0b0e87f26abb022a24d18f3a32e1cf365fa40a
Author: Ivo Bek <[email protected]>
AuthorDate: Sat Nov 8 10:49:13 2025 +0100
CAMEL-22661: Add a native OpenAI component (#19820)
* CAMEL-22661: Add a native OpenAI component
* CAMEL-22661: Generated doc updates and ObjectHelper for consistency
* CAMEL-22661: add support for tools and improve component configuration
* CAMEL-22661: optional conversation and full response in exchange
properties
* CAMEL-22661: update camel-openai docs
* CAMEL-22661: revert and remove tools to keep openai simple and non-agentic
---
bom/camel-bom/pom.xml | 5 +
catalog/camel-allcomponents/pom.xml | 5 +
components/camel-ai/camel-openai/pom.xml | 70 ++++
.../openai/OpenAIComponentConfigurer.java | 72 ++++
.../component/openai/OpenAIEndpointConfigurer.java | 132 ++++++
.../component/openai/OpenAIEndpointUriFactory.java | 88 ++++
.../org/apache/camel/component/openai/openai.json | 52 +++
.../services/org/apache/camel/component.properties | 7 +
.../services/org/apache/camel/component/openai | 2 +
.../org/apache/camel/configurer/openai-component | 2 +
.../org/apache/camel/configurer/openai-endpoint | 2 +
.../org/apache/camel/urifactory/openai-endpoint | 2 +
.../src/main/docs/openai-component.adoc | 329 +++++++++++++++
.../camel/component/openai/OpenAIComponent.java | 86 ++++
.../component/openai/OpenAIConfiguration.java | 218 ++++++++++
.../camel/component/openai/OpenAIConstants.java | 50 +++
.../camel/component/openai/OpenAIEndpoint.java | 127 ++++++
.../camel/component/openai/OpenAIProducer.java | 445 +++++++++++++++++++++
.../component/openai/OpenAIProducerMockTest.java | 89 +++++
components/camel-ai/pom.xml | 1 +
.../modules/ROOT/examples/json/openai.json | 1 +
docs/components/modules/ROOT/nav.adoc | 1 +
.../modules/ROOT/pages/openai-component.adoc | 1 +
parent/pom.xml | 11 +
24 files changed, 1798 insertions(+)
diff --git a/bom/camel-bom/pom.xml b/bom/camel-bom/pom.xml
index da1abc1df571..35074ea948e7 100644
--- a/bom/camel-bom/pom.xml
+++ b/bom/camel-bom/pom.xml
@@ -1667,6 +1667,11 @@
<artifactId>camel-olingo4-api</artifactId>
<version>4.17.0-SNAPSHOT</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-openai</artifactId>
+ <version>4.17.0-SNAPSHOT</version>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-openapi-java</artifactId>
diff --git a/catalog/camel-allcomponents/pom.xml
b/catalog/camel-allcomponents/pom.xml
index 7b21d4494e27..7edb1f43e46b 100644
--- a/catalog/camel-allcomponents/pom.xml
+++ b/catalog/camel-allcomponents/pom.xml
@@ -1476,6 +1476,11 @@
<artifactId>camel-olingo4-api</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-openai</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-openapi-java</artifactId>
diff --git a/components/camel-ai/camel-openai/pom.xml
b/components/camel-ai/camel-openai/pom.xml
new file mode 100644
index 000000000000..f63077ac32c3
--- /dev/null
+++ b/components/camel-ai/camel-openai/pom.xml
@@ -0,0 +1,70 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0"
+ xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0
http://maven.apache.org/xsd/maven-4.0.0.xsd">
+ <modelVersion>4.0.0</modelVersion>
+
+ <parent>
+ <artifactId>camel-ai-parent</artifactId>
+ <groupId>org.apache.camel</groupId>
+ <version>4.17.0-SNAPSHOT</version>
+ </parent>
+
+ <artifactId>camel-openai</artifactId>
+ <packaging>jar</packaging>
+ <name>Camel :: AI :: OpenAI</name>
+ <description>Camel OpenAI component for chat completion using OpenAI
API</description>
+
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-support</artifactId>
+ </dependency>
+
+ <dependency>
+ <groupId>com.openai</groupId>
+ <artifactId>openai-java</artifactId>
+ </dependency>
+
+ <!-- Test dependencies -->
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-junit5</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.junit.jupiter</groupId>
+ <artifactId>junit-jupiter</artifactId>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-test-infra-openai-mock</artifactId>
+ <version>${project.version}</version>
+ <scope>test</scope>
+ </dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-jackson</artifactId>
+ <scope>test</scope>
+ </dependency>
+ </dependencies>
+</project>
diff --git
a/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIComponentConfigurer.java
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIComponentConfigurer.java
new file mode 100644
index 000000000000..5110b39a1f5a
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIComponentConfigurer.java
@@ -0,0 +1,72 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.openai;
+
+import javax.annotation.processing.Generated;
+import java.util.Map;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
+import org.apache.camel.spi.PropertyConfigurerGetter;
+import org.apache.camel.spi.ConfigurerStrategy;
+import org.apache.camel.spi.GeneratedPropertyConfigurer;
+import org.apache.camel.util.CaseInsensitiveMap;
+import org.apache.camel.support.component.PropertyConfigurerSupport;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+@Generated("org.apache.camel.maven.packaging.EndpointSchemaGeneratorMojo")
+@SuppressWarnings("unchecked")
+public class OpenAIComponentConfigurer extends PropertyConfigurerSupport
implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
+
+ @Override
+ public boolean configure(CamelContext camelContext, Object obj, String
name, Object value, boolean ignoreCase) {
+ OpenAIComponent target = (OpenAIComponent) obj;
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey": target.setApiKey(property(camelContext,
java.lang.String.class, value)); return true;
+ case "autowiredenabled":
+ case "autowiredEnabled":
target.setAutowiredEnabled(property(camelContext, boolean.class, value));
return true;
+ case "baseurl":
+ case "baseUrl": target.setBaseUrl(property(camelContext,
java.lang.String.class, value)); return true;
+ case "lazystartproducer":
+ case "lazyStartProducer":
target.setLazyStartProducer(property(camelContext, boolean.class, value));
return true;
+ case "model": target.setModel(property(camelContext,
java.lang.String.class, value)); return true;
+ default: return false;
+ }
+ }
+
+ @Override
+ public Class<?> getOptionType(String name, boolean ignoreCase) {
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey": return java.lang.String.class;
+ case "autowiredenabled":
+ case "autowiredEnabled": return boolean.class;
+ case "baseurl":
+ case "baseUrl": return java.lang.String.class;
+ case "lazystartproducer":
+ case "lazyStartProducer": return boolean.class;
+ case "model": return java.lang.String.class;
+ default: return null;
+ }
+ }
+
+ @Override
+ public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
+ OpenAIComponent target = (OpenAIComponent) obj;
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey": return target.getApiKey();
+ case "autowiredenabled":
+ case "autowiredEnabled": return target.isAutowiredEnabled();
+ case "baseurl":
+ case "baseUrl": return target.getBaseUrl();
+ case "lazystartproducer":
+ case "lazyStartProducer": return target.isLazyStartProducer();
+ case "model": return target.getModel();
+ default: return null;
+ }
+ }
+}
+
diff --git
a/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointConfigurer.java
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointConfigurer.java
new file mode 100644
index 000000000000..810e34fa7413
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointConfigurer.java
@@ -0,0 +1,132 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.openai;
+
+import javax.annotation.processing.Generated;
+import java.util.Map;
+
+import org.apache.camel.CamelContext;
+import org.apache.camel.spi.ExtendedPropertyConfigurerGetter;
+import org.apache.camel.spi.PropertyConfigurerGetter;
+import org.apache.camel.spi.ConfigurerStrategy;
+import org.apache.camel.spi.GeneratedPropertyConfigurer;
+import org.apache.camel.util.CaseInsensitiveMap;
+import org.apache.camel.support.component.PropertyConfigurerSupport;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+@Generated("org.apache.camel.maven.packaging.EndpointSchemaGeneratorMojo")
+@SuppressWarnings("unchecked")
+public class OpenAIEndpointConfigurer extends PropertyConfigurerSupport
implements GeneratedPropertyConfigurer, PropertyConfigurerGetter {
+
+ @Override
+ public boolean configure(CamelContext camelContext, Object obj, String
name, Object value, boolean ignoreCase) {
+ OpenAIEndpoint target = (OpenAIEndpoint) obj;
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey":
target.getConfiguration().setApiKey(property(camelContext,
java.lang.String.class, value)); return true;
+ case "baseurl":
+ case "baseUrl":
target.getConfiguration().setBaseUrl(property(camelContext,
java.lang.String.class, value)); return true;
+ case "conversationhistoryproperty":
+ case "conversationHistoryProperty":
target.getConfiguration().setConversationHistoryProperty(property(camelContext,
java.lang.String.class, value)); return true;
+ case "conversationmemory":
+ case "conversationMemory":
target.getConfiguration().setConversationMemory(property(camelContext,
boolean.class, value)); return true;
+ case "developermessage":
+ case "developerMessage":
target.getConfiguration().setDeveloperMessage(property(camelContext,
java.lang.String.class, value)); return true;
+ case "jsonschema":
+ case "jsonSchema":
target.getConfiguration().setJsonSchema(property(camelContext,
java.lang.String.class, value)); return true;
+ case "lazystartproducer":
+ case "lazyStartProducer":
target.setLazyStartProducer(property(camelContext, boolean.class, value));
return true;
+ case "maxtokens":
+ case "maxTokens":
target.getConfiguration().setMaxTokens(property(camelContext,
java.lang.Integer.class, value)); return true;
+ case "model":
target.getConfiguration().setModel(property(camelContext,
java.lang.String.class, value)); return true;
+ case "outputclass":
+ case "outputClass":
target.getConfiguration().setOutputClass(property(camelContext,
java.lang.String.class, value)); return true;
+ case "storefullresponse":
+ case "storeFullResponse":
target.getConfiguration().setStoreFullResponse(property(camelContext,
boolean.class, value)); return true;
+ case "streaming":
target.getConfiguration().setStreaming(property(camelContext, boolean.class,
value)); return true;
+ case "systemmessage":
+ case "systemMessage":
target.getConfiguration().setSystemMessage(property(camelContext,
java.lang.String.class, value)); return true;
+ case "temperature":
target.getConfiguration().setTemperature(property(camelContext,
java.lang.Double.class, value)); return true;
+ case "topp":
+ case "topP": target.getConfiguration().setTopP(property(camelContext,
java.lang.Double.class, value)); return true;
+ case "usermessage":
+ case "userMessage":
target.getConfiguration().setUserMessage(property(camelContext,
java.lang.String.class, value)); return true;
+ default: return false;
+ }
+ }
+
+ @Override
+ public Class<?> getOptionType(String name, boolean ignoreCase) {
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey": return java.lang.String.class;
+ case "baseurl":
+ case "baseUrl": return java.lang.String.class;
+ case "conversationhistoryproperty":
+ case "conversationHistoryProperty": return java.lang.String.class;
+ case "conversationmemory":
+ case "conversationMemory": return boolean.class;
+ case "developermessage":
+ case "developerMessage": return java.lang.String.class;
+ case "jsonschema":
+ case "jsonSchema": return java.lang.String.class;
+ case "lazystartproducer":
+ case "lazyStartProducer": return boolean.class;
+ case "maxtokens":
+ case "maxTokens": return java.lang.Integer.class;
+ case "model": return java.lang.String.class;
+ case "outputclass":
+ case "outputClass": return java.lang.String.class;
+ case "storefullresponse":
+ case "storeFullResponse": return boolean.class;
+ case "streaming": return boolean.class;
+ case "systemmessage":
+ case "systemMessage": return java.lang.String.class;
+ case "temperature": return java.lang.Double.class;
+ case "topp":
+ case "topP": return java.lang.Double.class;
+ case "usermessage":
+ case "userMessage": return java.lang.String.class;
+ default: return null;
+ }
+ }
+
+ @Override
+ public Object getOptionValue(Object obj, String name, boolean ignoreCase) {
+ OpenAIEndpoint target = (OpenAIEndpoint) obj;
+ switch (ignoreCase ? name.toLowerCase() : name) {
+ case "apikey":
+ case "apiKey": return target.getConfiguration().getApiKey();
+ case "baseurl":
+ case "baseUrl": return target.getConfiguration().getBaseUrl();
+ case "conversationhistoryproperty":
+ case "conversationHistoryProperty": return
target.getConfiguration().getConversationHistoryProperty();
+ case "conversationmemory":
+ case "conversationMemory": return
target.getConfiguration().isConversationMemory();
+ case "developermessage":
+ case "developerMessage": return
target.getConfiguration().getDeveloperMessage();
+ case "jsonschema":
+ case "jsonSchema": return target.getConfiguration().getJsonSchema();
+ case "lazystartproducer":
+ case "lazyStartProducer": return target.isLazyStartProducer();
+ case "maxtokens":
+ case "maxTokens": return target.getConfiguration().getMaxTokens();
+ case "model": return target.getConfiguration().getModel();
+ case "outputclass":
+ case "outputClass": return target.getConfiguration().getOutputClass();
+ case "storefullresponse":
+ case "storeFullResponse": return
target.getConfiguration().isStoreFullResponse();
+ case "streaming": return target.getConfiguration().isStreaming();
+ case "systemmessage":
+ case "systemMessage": return
target.getConfiguration().getSystemMessage();
+ case "temperature": return target.getConfiguration().getTemperature();
+ case "topp":
+ case "topP": return target.getConfiguration().getTopP();
+ case "usermessage":
+ case "userMessage": return target.getConfiguration().getUserMessage();
+ default: return null;
+ }
+ }
+}
+
diff --git
a/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointUriFactory.java
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointUriFactory.java
new file mode 100644
index 000000000000..43a2de620e74
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/java/org/apache/camel/component/openai/OpenAIEndpointUriFactory.java
@@ -0,0 +1,88 @@
+/* Generated by camel build tools - do NOT edit this file! */
+package org.apache.camel.component.openai;
+
+import javax.annotation.processing.Generated;
+import java.net.URISyntaxException;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
+
+import org.apache.camel.spi.EndpointUriFactory;
+
+/**
+ * Generated by camel build tools - do NOT edit this file!
+ */
+@Generated("org.apache.camel.maven.packaging.GenerateEndpointUriFactoryMojo")
+public class OpenAIEndpointUriFactory extends
org.apache.camel.support.component.EndpointUriFactorySupport implements
EndpointUriFactory {
+
+ private static final String BASE = ":operation";
+
+ private static final Set<String> PROPERTY_NAMES;
+ private static final Set<String> SECRET_PROPERTY_NAMES;
+ private static final Map<String, String> MULTI_VALUE_PREFIXES;
+ static {
+ Set<String> props = new HashSet<>(17);
+ props.add("apiKey");
+ props.add("baseUrl");
+ props.add("conversationHistoryProperty");
+ props.add("conversationMemory");
+ props.add("developerMessage");
+ props.add("jsonSchema");
+ props.add("lazyStartProducer");
+ props.add("maxTokens");
+ props.add("model");
+ props.add("operation");
+ props.add("outputClass");
+ props.add("storeFullResponse");
+ props.add("streaming");
+ props.add("systemMessage");
+ props.add("temperature");
+ props.add("topP");
+ props.add("userMessage");
+ PROPERTY_NAMES = Collections.unmodifiableSet(props);
+ Set<String> secretProps = new HashSet<>(1);
+ secretProps.add("apiKey");
+ SECRET_PROPERTY_NAMES = Collections.unmodifiableSet(secretProps);
+ MULTI_VALUE_PREFIXES = Collections.emptyMap();
+ }
+
+ @Override
+ public boolean isEnabled(String scheme) {
+ return "openai".equals(scheme);
+ }
+
+ @Override
+ public String buildUri(String scheme, Map<String, Object> properties,
boolean encode) throws URISyntaxException {
+ String syntax = scheme + BASE;
+ String uri = syntax;
+
+ Map<String, Object> copy = new HashMap<>(properties);
+
+ uri = buildPathParameter(syntax, uri, "operation", null, true, copy);
+ uri = buildQueryParameters(uri, copy, encode);
+ return uri;
+ }
+
+ @Override
+ public Set<String> propertyNames() {
+ return PROPERTY_NAMES;
+ }
+
+ @Override
+ public Set<String> secretPropertyNames() {
+ return SECRET_PROPERTY_NAMES;
+ }
+
+ @Override
+ public Map<String, String> multiValuePrefixes() {
+ return MULTI_VALUE_PREFIXES;
+ }
+
+ @Override
+ public boolean isLenientProperties() {
+ return false;
+ }
+}
+
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/org/apache/camel/component/openai/openai.json
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/org/apache/camel/component/openai/openai.json
new file mode 100644
index 000000000000..d018ffaa223a
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/org/apache/camel/component/openai/openai.json
@@ -0,0 +1,52 @@
+{
+ "component": {
+ "kind": "component",
+ "name": "openai",
+ "title": "OpenAI",
+ "description": "OpenAI endpoint for chat completion.",
+ "deprecated": false,
+ "firstVersion": "4.17.0",
+ "label": "ai",
+ "javaType": "org.apache.camel.component.openai.OpenAIComponent",
+ "supportLevel": "Preview",
+ "groupId": "org.apache.camel",
+ "artifactId": "camel-openai",
+ "version": "4.17.0-SNAPSHOT",
+ "scheme": "openai",
+ "extendsScheme": "",
+ "syntax": "openai:operation",
+ "async": false,
+ "api": false,
+ "consumerOnly": false,
+ "producerOnly": true,
+ "lenientProperties": false,
+ "browsable": false,
+ "remote": true
+ },
+ "componentProperties": {
+ "apiKey": { "index": 0, "kind": "property", "displayName": "Api Key",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "autowired": false,
"secret": false, "description": "Default API key for all endpoints" },
+ "baseUrl": { "index": 1, "kind": "property", "displayName": "Base Url",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "autowired": false,
"secret": false, "description": "Default base URL for all endpoints" },
+ "lazyStartProducer": { "index": 2, "kind": "property", "displayName":
"Lazy Start Producer", "group": "producer", "label": "producer", "required":
false, "type": "boolean", "javaType": "boolean", "deprecated": false,
"autowired": false, "secret": false, "defaultValue": false, "description":
"Whether the producer should be started lazy (on the first message). By
starting lazy you can use this to allow CamelContext and routes to startup in
situations where a producer may otherwise fail [...]
+ "model": { "index": 3, "kind": "property", "displayName": "Model",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "autowired": false,
"secret": false, "description": "Default model for all endpoints" },
+ "autowiredEnabled": { "index": 4, "kind": "property", "displayName":
"Autowired Enabled", "group": "advanced", "label": "advanced", "required":
false, "type": "boolean", "javaType": "boolean", "deprecated": false,
"autowired": false, "secret": false, "defaultValue": true, "description":
"Whether autowiring is enabled. This is used for automatic autowiring options
(the option must be marked as autowired) by looking up in the registry to find
if there is a single instance of matching t [...]
+ },
+ "properties": {
+ "operation": { "index": 0, "kind": "path", "displayName": "Operation",
"group": "producer", "label": "", "required": true, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "description": "The operation to perform
(currently only chat-completion is supported)" },
+ "apiKey": { "index": 1, "kind": "parameter", "displayName": "Api Key",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": true, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "OpenAI API key. Can also be set via
OPENAI_API_KEY environment variable." },
+ "baseUrl": { "index": 2, "kind": "parameter", "displayName": "Base Url",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Base URL for OpenAI API. Defaults to OpenAI's
official endpoint. Can be used for local or third-p [...]
+ "conversationHistoryProperty": { "index": 3, "kind": "parameter",
"displayName": "Conversation History Property", "group": "producer", "label":
"", "required": false, "type": "string", "javaType": "java.lang.String",
"deprecated": false, "deprecationNote": "", "autowired": false, "secret":
false, "defaultValue": "CamelOpenAIConversationHistory", "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Exchan [...]
+ "conversationMemory": { "index": 4, "kind": "parameter", "displayName":
"Conversation Memory", "group": "producer", "label": "", "required": false,
"type": "boolean", "javaType": "boolean", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false, "defaultValue":
false, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Enable conversation memory per Exchange" },
+ "developerMessage": { "index": 5, "kind": "parameter", "displayName":
"Developer Message", "group": "producer", "label": "", "required": false,
"type": "string", "javaType": "java.lang.String", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "Developer message to
prepend before user messages" },
+ "jsonSchema": { "index": 6, "kind": "parameter", "displayName": "Json
Schema", "group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "supportFileReference": true,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "JSON schema for
structured output validation" },
+ "maxTokens": { "index": 7, "kind": "parameter", "displayName": "Max
Tokens", "group": "producer", "label": "", "required": false, "type":
"integer", "javaType": "java.lang.Integer", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "Maximum number of tokens
to generate" },
+ "model": { "index": 8, "kind": "parameter", "displayName": "Model",
"group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "defaultValue": "gpt-5",
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "The model to use for
chat completion" },
+ "outputClass": { "index": 9, "kind": "parameter", "displayName": "Output
Class", "group": "producer", "label": "", "required": false, "type": "string",
"javaType": "java.lang.String", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Fully qualified class name for structured
output using response format" },
+ "storeFullResponse": { "index": 10, "kind": "parameter", "displayName":
"Store Full Response", "group": "producer", "label": "", "required": false,
"type": "boolean", "javaType": "boolean", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false, "defaultValue":
false, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Store the full response in the exchange
property 'CamelOpenA [...]
+ "streaming": { "index": 11, "kind": "parameter", "displayName":
"Streaming", "group": "producer", "label": "", "required": false, "type":
"boolean", "javaType": "boolean", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "defaultValue": false,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "Enable streaming
responses" },
+ "systemMessage": { "index": 12, "kind": "parameter", "displayName":
"System Message", "group": "producer", "label": "", "required": false, "type":
"string", "javaType": "java.lang.String", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "System message to
prepend. When set and conversationMemory is enabled, the conversat [...]
+ "temperature": { "index": 13, "kind": "parameter", "displayName":
"Temperature", "group": "producer", "label": "", "required": false, "type":
"number", "javaType": "java.lang.Double", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false, "defaultValue":
"1.0", "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Temperature for response generation (0.0 to
2.0)" },
+ "topP": { "index": 14, "kind": "parameter", "displayName": "Top P",
"group": "producer", "label": "", "required": false, "type": "number",
"javaType": "java.lang.Double", "deprecated": false, "deprecationNote": "",
"autowired": false, "secret": false, "configurationClass":
"org.apache.camel.component.openai.OpenAIConfiguration", "configurationField":
"configuration", "description": "Top P for response generation (0.0 to 1.0)" },
+ "userMessage": { "index": 15, "kind": "parameter", "displayName": "User
Message", "group": "producer", "label": "", "required": false, "type":
"string", "javaType": "java.lang.String", "deprecated": false,
"deprecationNote": "", "autowired": false, "secret": false,
"configurationClass": "org.apache.camel.component.openai.OpenAIConfiguration",
"configurationField": "configuration", "description": "Default user message
text to use when no prompt is provided" },
+ "lazyStartProducer": { "index": 16, "kind": "parameter", "displayName":
"Lazy Start Producer", "group": "producer (advanced)", "label":
"producer,advanced", "required": false, "type": "boolean", "javaType":
"boolean", "deprecated": false, "autowired": false, "secret": false,
"defaultValue": false, "description": "Whether the producer should be started
lazy (on the first message). By starting lazy you can use this to allow
CamelContext and routes to startup in situations where a produ [...]
+ }
+}
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component.properties
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component.properties
new file mode 100644
index 000000000000..e2eb3d2309b7
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component.properties
@@ -0,0 +1,7 @@
+# Generated by camel build tools - do NOT edit this file!
+components=openai
+groupId=org.apache.camel
+artifactId=camel-openai
+version=4.17.0-SNAPSHOT
+projectName=Camel :: AI :: OpenAI
+projectDescription=Camel OpenAI component for chat completion using OpenAI API
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component/openai
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component/openai
new file mode 100644
index 000000000000..c4a02da9c69a
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/component/openai
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.openai.OpenAIComponent
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-component
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-component
new file mode 100644
index 000000000000..c6d82e5acb36
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-component
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.openai.OpenAIComponentConfigurer
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-endpoint
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-endpoint
new file mode 100644
index 000000000000..937f3eaf9319
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/configurer/openai-endpoint
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.openai.OpenAIEndpointConfigurer
diff --git
a/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/urifactory/openai-endpoint
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/urifactory/openai-endpoint
new file mode 100644
index 000000000000..6db939fdef33
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/generated/resources/META-INF/services/org/apache/camel/urifactory/openai-endpoint
@@ -0,0 +1,2 @@
+# Generated by camel build tools - do NOT edit this file!
+class=org.apache.camel.component.openai.OpenAIEndpointUriFactory
diff --git
a/components/camel-ai/camel-openai/src/main/docs/openai-component.adoc
b/components/camel-ai/camel-openai/src/main/docs/openai-component.adoc
new file mode 100644
index 000000000000..3e7d1fda619d
--- /dev/null
+++ b/components/camel-ai/camel-openai/src/main/docs/openai-component.adoc
@@ -0,0 +1,329 @@
+= OpenAI Component
+:doctitle: OpenAI
+:shortname: openai
+:artifactid: camel-openai
+:description: OpenAI endpoint for chat completion.
+:since: 4.17
+:supportlevel: Preview
+:tabs-sync-option:
+:component-header: Only producer is supported
+//Manually maintained attributes
+:group: AI
+
+*Since Camel {since}*
+
+*{component-header}*
+
+The OpenAI component provides integration with OpenAI and OpenAI-compatible
APIs for chat completion using the official openai-java SDK.
+
+Maven users will need to add the following dependency to their `pom.xml` for
this component:
+
+[source,xml]
+----
+<dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-openai</artifactId>
+ <version>x.x.x</version>
+ <!-- use the same version as your Camel core version -->
+</dependency>
+----
+
+== URI Format
+
+[source]
+----
+openai:operation[?options]
+----
+
+Currently, only the `chat-completion` operation is supported.
+
+// component-configure options: START
+
+// component-configure options: END
+
+// component options: START
+include::partial$component-configure-options.adoc[]
+include::partial$component-endpoint-options.adoc[]
+// component options: END
+
+// endpoint options: START
+
+// endpoint options: END
+
+// component headers: START
+include::partial$component-endpoint-headers.adoc[]
+// component headers: END
+
+== Usage
+
+=== Authentication
+
+Set `baseUrl` to your provider's endpoint
(default: `https://api.openai.com/v1`).
+
+API key resolution order:
+
+- Endpoint `apiKey`
+- Component `apiKey`
+- Environment variable `OPENAI_API_KEY`
+- System property `openai.api.key`
+
+[NOTE]
+====
+The API key can be omitted if using OpenAI-compatible providers that don't
require authentication (e.g., some local LLM servers).
+====
+
+=== Basic Chat Completion with String Input
+
+[tabs]
+====
+Java::
++
+[source,java]
+----
+from("direct:chat")
+ .setBody(constant("What is Apache Camel?"))
+ .to("openai:chat-completion")
+ .log("Response: ${body}");
+----
+
+YAML::
++
+[source,yaml]
+----
+- route:
+ from:
+ uri: direct:chat
+ steps:
+ - to:
+ uri: openai:chat-completion
+ parameters:
+ userMessage: What is Apache Camel?
+ - log: "Response: ${body}"
+----
+====
+
+=== File-Backed Prompt with Text File
+
+.Usage example:
+[source,java]
+----
+from("file:prompts?noop=true")
+ .to("openai:chat-completion")
+ .log("Response: ${body}");
+----
+
+
+
+=== Image File Input with Vision Model
+
+.Usage example:
+[source,java]
+----
+from("file:images?noop=true")
+    .to("openai:chat-completion?model=gpt-4.1-mini&userMessage=Describe what
you see in this image")
+ .log("Response: ${body}");
+----
+
+[NOTE]
+====
+When using image files, the userMessage is required. Supported image formats
are detected by MIME type (e.g., `image/png`, `image/jpeg`, `image/gif`,
`image/webp`).
+====
+
+=== Streaming Response
+
+When `streaming=true`, the component returns an
`Iterator<ChatCompletionChunk>` in the message body. You can consume this
iterator using Camel's streaming EIPs or process it directly:
+
+.Usage example:
+[source,yaml]
+----
+- route:
+ id: route-1145
+ from:
+ id: from-1972
+ uri: timer
+ parameters:
+ repeatCount: 1
+ timerName: timer
+ steps:
+ - to:
+ id: to-1301
+ uri: openai:chat-completion
+ parameters:
+ userMessage: In one sentence, what is Apache Camel?
+ streaming: true
+ - split:
+ id: split-3196
+ steps:
+ - marshal:
+ id: marshal-3773
+ json:
+ library: Jackson
+ - log:
+ id: log-6722
+ message: ${body}
+ simple:
+ expression: ${body}
+ streaming: true
+----
+
+=== Structured Output with outputClass
+
+.When `outputClass` is set, the model is instructed to produce JSON matching
the given class, but the component returns the raw String. Deserialize the body
yourself (e.g., with Camel's Jackson) if you need a typed object.
+
+.Usage example:
+[source,java]
+----
+public class Person {
+ public String name;
+ public int age;
+ public String occupation;
+}
+
+from("direct:structured")
+ .setBody(constant("Generate a person profile for a software engineer"))
+
.to("openai:chat-completion?baseUrl=https://api.openai.com/v1&outputClass=com.example.Person")
+ .log("Structured response: ${body}");
+----
+
+=== Structured Output with JSON Schema
+
+The `jsonSchema` option instructs the model to return JSON that conforms to
the provided schema. The response will be valid JSON but is not automatically
validated against the schema:
+
+.Usage example:
+[source,java]
+----
+from("direct:json-schema")
+ .setBody(constant("Create a product description"))
+ .setHeader("CamelOpenAIJsonSchema",
constant("{\"type\":\"object\",\"properties\":{\"name\":{\"type\":\"string\"},\"price\":{\"type\":\"number\"}}}"))
+ .to("openai:chat-completion")
+ .log("JSON response: ${body}");
+----
+
+You can also load the schema from a resource file:
+
+.Usage example:
+[source,java]
+----
+from("direct:json-schema-resource")
+ .setBody(constant("Create a product description"))
+
.to("openai:chat-completion?jsonSchema=resource:classpath:schemas/product.schema.json")
+ .log("JSON response: ${body}");
+----
+
+[NOTE]
+====
+For full schema validation, integrate with the `camel-json-validator`
component after receiving the response.
+====
+
+=== Conversation Memory (Per Exchange)
+
+.Usage example:
+[source,java]
+----
+from("direct:conversation")
+ .setBody(constant("My name is Alice"))
+ .to("openai:chat-completion?conversationMemory=true")
+ .log("First response: ${body}")
+ .setBody(constant("What is my name?"))
+ .to("openai:chat-completion?conversationMemory=true")
+ .log("Second response: ${body}"); // Will remember "Alice"
+----
+
+=== Using Third-Party or Local OpenAI-Compatible Endpoint
+
+.Usage example:
+[source,java]
+----
+from("direct:local")
+ .setBody(constant("Hello from local LLM"))
+
.to("openai:chat-completion?baseUrl=http://localhost:1234/v1&model=local-model")
+ .log("${body}");
+----
+
+== Input Handling
+
+The component accepts the following types of input in the message body:
+
+1. *String*: The prompt text is taken directly from the body
+2. *File*: Used for file-based prompts. The component handles two types of
files:
+ * *Text files* (MIME type starting with `text/`): The file content is read
and used as the prompt. If userMessage endpoint option or
`CamelOpenAIUserMessage` is set, it overrides the file content
+ * *Image files* (MIME type starting with `image/`): The file is encoded as
a base64 data URL and sent to vision-capable models. The userMessage is
**required** when using image files
+
+[NOTE]
+====
+When using `File` input, the component uses `Files.probeContentType()` to
detect the file type. Ensure your system has proper MIME type detection
configured.
+====
+
+== Output Handling
+
+=== Default Mode
+The full model response is returned as a String in the message body.
+
+=== Streaming Mode
+When `streaming=true`, the message body contains an
`Iterator<ChatCompletionChunk>` suitable for Camel streaming EIPs (such as
`split()` with `streaming()`).
+
+IMPORTANT:
+* Resource cleanup is handled automatically when the Exchange completes
(success or failure)
+* Conversation memory is **not** automatically updated for streaming responses
(only for non-streaming responses)
+
+=== Structured Outputs
+
+==== Using outputClass
+The model is instructed to return JSON matching the specified class, but the
response body remains a String.
+
+==== Using jsonSchema
+The `jsonSchema` option instructs the model to return JSON conforming to the
provided schema. The response will be valid JSON but is not automatically
validated against the schema. For full schema validation, integrate with the
`camel-json-validator` component after receiving the response.
+
+The JSON schema must be a valid JSON object. Invalid schema strings will
result in an `IllegalArgumentException`.
+
+== Conversation Memory
+
+When `conversationMemory=true`, the component maintains conversation history
in the `CamelOpenAIConversationHistory` exchange property (configurable via
`conversationHistoryProperty` option). This history is scoped to a single
Exchange and allows multi-turn conversations within a route.
+
+IMPORTANT:
+* Conversation history is automatically updated with each assistant response
for **non-streaming** responses only
+* The history is stored as a `List<ChatCompletionMessageParam>` in the
Exchange property
+* The history persists across multiple calls to the endpoint within the same
Exchange
+* You can manually set the `CamelOpenAIConversationHistory` exchange property
to provide custom conversation context
+
+Example of manual conversation history:
+
+.Usage example:
+[source,java]
+----
+List<ChatCompletionMessageParam> history = new ArrayList<>();
+history.add(ChatCompletionMessageParam.ofUser(/* ... */));
+history.add(ChatCompletionMessageParam.ofAssistant(/* ... */));
+
+from("direct:with-history")
+ .setBody(constant("Continue the conversation"))
+ .setProperty("CamelOpenAIConversationHistory", constant(history))
+ .to("openai:chat-completion?conversationMemory=true")
+ .log("${body}");
+----
+
+== Compatibility
+
+This component works with any OpenAI API-compatible endpoint by setting the
`baseUrl` parameter. This includes:
+
+- OpenAI official API (`https://api.openai.com/v1`)
+- Azure OpenAI (may require additional configuration)
+- Local LLM servers (e.g., Ollama, LM Studio, LocalAI)
+- Third-party OpenAI-compatible providers
+
+[NOTE]
+====
+When using local or third-party providers, ensure they support the chat
completions API endpoint format. Some providers may have different
authentication requirements or API variations.
+====
+
+== Error Handling
+
+The component may throw the following exceptions:
+
+* `IllegalArgumentException`:
+ ** When an invalid operation is specified (only `chat-completion` is
supported)
+ ** When message body or user message is missing
+ ** When image file is provided without userMessage
+ ** When unsupported file type is provided (only text and image files are
supported)
+ ** When invalid JSON schema string is provided
+* API-specific exceptions from the OpenAI SDK for network errors,
authentication failures, rate limiting, etc.
diff --git
a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIComponent.java
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIComponent.java
new file mode 100644
index 000000000000..c8980308302a
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIComponent.java
@@ -0,0 +1,86 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+import java.util.Map;
+
+import org.apache.camel.Endpoint;
+import org.apache.camel.spi.Metadata;
+import org.apache.camel.spi.annotations.Component;
+import org.apache.camel.support.DefaultComponent;
+
+/**
+ * OpenAI component for chat completion.
+ */
+@Component("openai")
+public class OpenAIComponent extends DefaultComponent {
+
+ @Metadata(description = "Default API key for all endpoints")
+ private String apiKey;
+
+ @Metadata(description = "Default base URL for all endpoints")
+ private String baseUrl;
+
+ @Metadata(description = "Default model for all endpoints")
+ private String model;
+
+ @Override
+ protected Endpoint createEndpoint(String uri, String remaining,
Map<String, Object> parameters) throws Exception {
+ OpenAIConfiguration configuration = new OpenAIConfiguration();
+
+ if (apiKey != null) {
+ configuration.setApiKey(apiKey);
+ }
+ if (baseUrl != null) {
+ configuration.setBaseUrl(baseUrl);
+ }
+ if (model != null) {
+ configuration.setModel(model);
+ }
+
+ OpenAIEndpoint endpoint = new OpenAIEndpoint(uri, this, configuration);
+ // set the operation from the URI path (e.g., chat-completion)
+ endpoint.setOperation(remaining);
+ setProperties(endpoint, parameters);
+
+ return endpoint;
+ }
+
+ public String getApiKey() {
+ return apiKey;
+ }
+
+ public void setApiKey(String apiKey) {
+ this.apiKey = apiKey;
+ }
+
+ public String getBaseUrl() {
+ return baseUrl;
+ }
+
+ public void setBaseUrl(String baseUrl) {
+ this.baseUrl = baseUrl;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+}
diff --git
a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConfiguration.java
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConfiguration.java
new file mode 100644
index 000000000000..e09948a9616e
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConfiguration.java
@@ -0,0 +1,218 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+import org.apache.camel.spi.Metadata;
+import org.apache.camel.spi.UriParam;
+import org.apache.camel.spi.UriParams;
+
+/**
+ * Configuration for OpenAI component.
+ */
+@UriParams
+public class OpenAIConfiguration implements Cloneable {
+
+ @UriParam(secret = true)
+ @Metadata(description = "OpenAI API key. Can also be set via
OPENAI_API_KEY environment variable.", secret = true)
+ private String apiKey;
+
+ @UriParam
+ @Metadata(description = "Base URL for OpenAI API. Defaults to OpenAI's
official endpoint. Can be used for local or third-party providers.")
+ private String baseUrl;
+
+ @UriParam(defaultValue = "gpt-5")
+ @Metadata(description = "The model to use for chat completion")
+ private String model = "gpt-5";
+
+ @UriParam(defaultValue = "1.0")
+ @Metadata(description = "Temperature for response generation (0.0 to 2.0)")
+ private Double temperature = 1.0;
+
+ @UriParam
+ @Metadata(description = "Top P for response generation (0.0 to 1.0)")
+ private Double topP;
+
+ @UriParam
+ @Metadata(description = "Maximum number of tokens to generate")
+ private Integer maxTokens;
+
+ @UriParam(defaultValue = "false")
+ @Metadata(description = "Enable streaming responses")
+ private boolean streaming = false;
+
+ @UriParam
+ @Metadata(description = "Fully qualified class name for structured output
using response format")
+ private String outputClass;
+
+ @UriParam
+ @Metadata(description = "JSON schema for structured output validation",
supportFileReference = true, largeInput = true,
+ inputLanguage = "json")
+ private String jsonSchema;
+
+ @UriParam(defaultValue = "false")
+ @Metadata(description = "Enable conversation memory per Exchange")
+ private boolean conversationMemory = false;
+
+ @UriParam(defaultValue = "CamelOpenAIConversationHistory")
+ @Metadata(description = "Exchange property name for storing conversation
history")
+ private String conversationHistoryProperty =
"CamelOpenAIConversationHistory";
+
+ @UriParam
+ @Metadata(description = "Default user message text to use when no prompt
is provided", largeInput = true)
+ private String userMessage;
+
+ @UriParam
+ @Metadata(description = "System message to prepend. When set and
conversationMemory is enabled, the conversation history is reset.",
+ largeInput = true)
+ private String systemMessage;
+
+ @UriParam
+ @Metadata(description = "Developer message to prepend before user
messages", largeInput = true)
+ private String developerMessage;
+
+ @UriParam(defaultValue = "false")
+ @Metadata(description = "Store the full response in the exchange property
'CamelOpenAIResponse' in non-streaming mode")
+ private boolean storeFullResponse = false;
+
+ public String getApiKey() {
+ return apiKey;
+ }
+
+ public void setApiKey(String apiKey) {
+ this.apiKey = apiKey;
+ }
+
+ public String getBaseUrl() {
+ return baseUrl;
+ }
+
+ public void setBaseUrl(String baseUrl) {
+ this.baseUrl = baseUrl;
+ }
+
+ public String getModel() {
+ return model;
+ }
+
+ public void setModel(String model) {
+ this.model = model;
+ }
+
+ public Double getTemperature() {
+ return temperature;
+ }
+
+ public void setTemperature(Double temperature) {
+ this.temperature = temperature;
+ }
+
+ public Double getTopP() {
+ return topP;
+ }
+
+ public void setTopP(Double topP) {
+ this.topP = topP;
+ }
+
+ public Integer getMaxTokens() {
+ return maxTokens;
+ }
+
+ public void setMaxTokens(Integer maxTokens) {
+ this.maxTokens = maxTokens;
+ }
+
+ public boolean isStreaming() {
+ return streaming;
+ }
+
+ public void setStreaming(boolean streaming) {
+ this.streaming = streaming;
+ }
+
+ public String getOutputClass() {
+ return outputClass;
+ }
+
+ public void setOutputClass(String outputClass) {
+ this.outputClass = outputClass;
+ }
+
+ public String getJsonSchema() {
+ return jsonSchema;
+ }
+
+ public void setJsonSchema(String jsonSchema) {
+ this.jsonSchema = jsonSchema;
+ }
+
+ public boolean isConversationMemory() {
+ return conversationMemory;
+ }
+
+ public void setConversationMemory(boolean conversationMemory) {
+ this.conversationMemory = conversationMemory;
+ }
+
+ public String getConversationHistoryProperty() {
+ return conversationHistoryProperty;
+ }
+
+ public void setConversationHistoryProperty(String
conversationHistoryProperty) {
+ this.conversationHistoryProperty = conversationHistoryProperty;
+ }
+
+ public String getUserMessage() {
+ return userMessage;
+ }
+
+ public void setUserMessage(String userMessage) {
+ this.userMessage = userMessage;
+ }
+
+ public String getSystemMessage() {
+ return systemMessage;
+ }
+
+ public void setSystemMessage(String systemMessage) {
+ this.systemMessage = systemMessage;
+ }
+
+ public String getDeveloperMessage() {
+ return developerMessage;
+ }
+
+ public void setDeveloperMessage(String developerMessage) {
+ this.developerMessage = developerMessage;
+ }
+
+ public boolean isStoreFullResponse() {
+ return storeFullResponse;
+ }
+
+ public void setStoreFullResponse(boolean storeFullResponse) {
+ this.storeFullResponse = storeFullResponse;
+ }
+
+ public OpenAIConfiguration copy() {
+ try {
+ return (OpenAIConfiguration) clone();
+ } catch (CloneNotSupportedException e) {
+ throw new RuntimeException(e);
+ }
+ }
+}
diff --git
a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConstants.java
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConstants.java
new file mode 100644
index 000000000000..d82537c7fde4
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIConstants.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+/**
+ * Constants for OpenAI component headers and properties.
+ */
+public final class OpenAIConstants {
+
+ // Input Headers
+ public static final String USER_MESSAGE = "CamelOpenAIUserMessage";
+ public static final String SYSTEM_MESSAGE = "CamelOpenAISystemMessage";
+ public static final String DEVELOPER_MESSAGE =
"CamelOpenAIDeveloperMessage";
+ public static final String MODEL = "CamelOpenAIModel";
+ public static final String TEMPERATURE = "CamelOpenAITemperature";
+ public static final String TOP_P = "CamelOpenAITopP";
+ public static final String MAX_TOKENS = "CamelOpenAIMaxTokens";
+ public static final String STREAMING = "CamelOpenAIStreaming";
+ public static final String OUTPUT_CLASS = "CamelOpenAIOutputClass";
+ public static final String JSON_SCHEMA = "CamelOpenAIJsonSchema";
+
+ // Output Headers
+ public static final String RESPONSE_MODEL = "CamelOpenAIResponseModel";
+ public static final String RESPONSE_ID = "CamelOpenAIResponseId";
+ public static final String FINISH_REASON = "CamelOpenAIFinishReason";
+ public static final String PROMPT_TOKENS = "CamelOpenAIPromptTokens";
+ public static final String COMPLETION_TOKENS =
"CamelOpenAICompletionTokens";
+ public static final String TOTAL_TOKENS = "CamelOpenAITotalTokens";
+
+ // Output Exchange Properties
+ public static final String RESPONSE = "CamelOpenAIResponse";
+
+ private OpenAIConstants() {
+ // Utility class
+ }
+}
diff --git
a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIEndpoint.java
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIEndpoint.java
new file mode 100644
index 000000000000..09795aa3574d
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIEndpoint.java
@@ -0,0 +1,127 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+import com.openai.client.OpenAIClient;
+import com.openai.client.okhttp.OpenAIOkHttpClient;
+import org.apache.camel.Category;
+import org.apache.camel.Consumer;
+import org.apache.camel.Processor;
+import org.apache.camel.Producer;
+import org.apache.camel.spi.Metadata;
+import org.apache.camel.spi.UriEndpoint;
+import org.apache.camel.spi.UriParam;
+import org.apache.camel.spi.UriPath;
+import org.apache.camel.support.DefaultEndpoint;
+import org.apache.camel.util.ObjectHelper;
+
+/**
+ * OpenAI endpoint for chat completion.
+ */
+@UriEndpoint(firstVersion = "4.17.0",
+ scheme = "openai",
+ title = "OpenAI",
+ syntax = "openai:operation",
+ category = { Category.AI },
+ producerOnly = true)
+public class OpenAIEndpoint extends DefaultEndpoint {
+
+ @UriPath
+ @Metadata(required = true, description = "The operation to perform
(currently only chat-completion is supported)")
+ private String operation;
+
+ @UriParam
+ private OpenAIConfiguration configuration;
+
+ private OpenAIClient client;
+
+ public OpenAIEndpoint(String uri, OpenAIComponent component,
OpenAIConfiguration configuration) {
+ super(uri, component);
+ this.configuration = configuration;
+ }
+
+ @Override
+ public Producer createProducer() throws Exception {
+ if (!"chat-completion".equals(operation)) {
+ throw new IllegalArgumentException("Only 'chat-completion'
operation is supported");
+ }
+ return new OpenAIProducer(this);
+ }
+
+ @Override
+ public Consumer createConsumer(Processor processor) throws Exception {
+ throw new UnsupportedOperationException("Consumer not supported for
OpenAI component");
+ }
+
+ @Override
+ protected void doStart() throws Exception {
+ super.doStart();
+ client = createClient();
+ }
+
+ @Override
+ protected void doStop() throws Exception {
+ if (client != null) {
+ client = null;
+ }
+ super.doStop();
+ }
+
+ protected OpenAIClient createClient() {
+ String apiKey = resolveApiKey();
+
+ OpenAIOkHttpClient.Builder builder = OpenAIOkHttpClient.builder();
+
+ if (ObjectHelper.isNotEmpty(apiKey)) {
+ builder.apiKey(apiKey);
+ }
+
+
builder.baseUrl(ObjectHelper.notNullOrEmpty(configuration.getBaseUrl(),
"baseUrl"));
+
+ return builder.build();
+ }
+
+ protected String resolveApiKey() {
+ // Priority: URI parameter > component config > environment variable >
application.properties
+ if (ObjectHelper.isNotEmpty(configuration.getApiKey())) {
+ return configuration.getApiKey();
+ }
+
+ String envApiKey = System.getenv("OPENAI_API_KEY");
+ if (ObjectHelper.isNotEmpty(envApiKey)) {
+ return envApiKey;
+ }
+
+ return System.getProperty("openai.api.key");
+ }
+
+ public String getOperation() {
+ return operation;
+ }
+
+ public void setOperation(String operation) {
+ this.operation = operation;
+ }
+
+ public OpenAIConfiguration getConfiguration() {
+ return configuration;
+ }
+
+ public OpenAIClient getClient() {
+ return client;
+ }
+}
diff --git
a/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
new file mode 100644
index 000000000000..70132f7eb20c
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/main/java/org/apache/camel/component/openai/OpenAIProducer.java
@@ -0,0 +1,445 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+import java.io.File;
+import java.io.InputStream;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.util.ArrayList;
+import java.util.Base64;
+import java.util.Iterator;
+import java.util.List;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.openai.core.JsonValue;
+import com.openai.core.http.StreamResponse;
+import com.openai.models.ResponseFormatJsonSchema;
+import com.openai.models.chat.completions.ChatCompletion;
+import com.openai.models.chat.completions.ChatCompletionAssistantMessageParam;
+import com.openai.models.chat.completions.ChatCompletionChunk;
+import com.openai.models.chat.completions.ChatCompletionContentPart;
+import com.openai.models.chat.completions.ChatCompletionContentPartImage;
+import com.openai.models.chat.completions.ChatCompletionContentPartText;
+import com.openai.models.chat.completions.ChatCompletionCreateParams;
+import com.openai.models.chat.completions.ChatCompletionDeveloperMessageParam;
+import com.openai.models.chat.completions.ChatCompletionMessageParam;
+import com.openai.models.chat.completions.ChatCompletionSystemMessageParam;
+import com.openai.models.chat.completions.ChatCompletionUserMessageParam;
+import com.openai.models.completions.CompletionUsage;
+import org.apache.camel.AsyncCallback;
+import org.apache.camel.Exchange;
+import org.apache.camel.Message;
+import org.apache.camel.spi.Synchronization;
+import org.apache.camel.support.DefaultAsyncProducer;
+import org.apache.camel.support.ResourceHelper;
+import org.apache.camel.util.ObjectHelper;
+
/**
 * OpenAI producer for chat completion. Builds the request from the exchange
 * (headers override endpoint configuration), invokes the OpenAI client and maps
 * the response (or response stream) back onto the exchange.
 */
public class OpenAIProducer extends DefaultAsyncProducer {

    // Shared mapper used to parse user-supplied JSON schema strings.
    // NOTE(review): a configured Jackson ObjectMapper is thread-safe for reads — confirm
    // no later code mutates its configuration.
    private static final ObjectMapper OBJECT_MAPPER = new ObjectMapper();

    public OpenAIProducer(OpenAIEndpoint endpoint) {
        super(endpoint);
    }
+
    @Override
    protected void doStart() throws Exception {
        OpenAIConfiguration config = getEndpoint().getConfiguration();

        // Eagerly normalize the jsonSchema option: it may be inline JSON, a property
        // placeholder, or a resource URI (classpath:/file:). After this block the
        // configuration always holds the raw schema content.
        if (ObjectHelper.isNotEmpty(config.getJsonSchema())) {
            String resolved = getEndpoint().getCamelContext().resolvePropertyPlaceholders(config.getJsonSchema());
            String content = resolveResourceContent(resolved);
            if (content != null) {
                // the value pointed at a loadable resource; use its content
                config.setJsonSchema(content);
            } else {
                // not a resource; treat the placeholder-resolved value as inline schema
                config.setJsonSchema(resolved);
            }
        }

        super.doStart();
    }
+
    /**
     * Tries to load the given value as a Camel resource (e.g. classpath: or file:).
     *
     * @return the resource content as a String, or {@code null} when the value does not
     *         resolve to a readable resource (the caller then treats it as inline content)
     */
    private String resolveResourceContent(String property) {
        try (InputStream is = ResourceHelper.resolveResourceAsInputStream(getEndpoint().getCamelContext(), property)) {
            if (is != null) {
                return getEndpoint().getCamelContext().getTypeConverter().convertTo(String.class, is);
            }
        } catch (Exception e) {
            // deliberate best-effort: any failure means "not a resource", fall through
        }
        return null;
    }
+
    @Override
    public OpenAIEndpoint getEndpoint() {
        return (OpenAIEndpoint) super.getEndpoint();
    }

    @Override
    public boolean process(Exchange exchange, AsyncCallback callback) {
        // Executes synchronously on the caller thread (returning true = completed
        // synchronously); the async contract is honoured by always signalling the callback.
        try {
            processInternal(exchange);
            callback.done(true);
            return true;
        } catch (Exception e) {
            // surface the failure on the exchange rather than throwing to the caller
            exchange.setException(e);
            callback.done(true);
            return true;
        }
    }
+
    /**
     * Core request handling: resolves per-exchange options (headers override endpoint
     * configuration), builds the chat completion request and dispatches to the
     * streaming or non-streaming path.
     */
    private void processInternal(Exchange exchange) throws Exception {
        OpenAIConfiguration config = getEndpoint().getConfiguration();
        Message in = exchange.getIn();

        // Resolve parameters from headers or configuration (header wins when present)
        String model = resolveParameter(in, OpenAIConstants.MODEL, config.getModel(), String.class);
        Double temperature = resolveParameter(in, OpenAIConstants.TEMPERATURE, config.getTemperature(), Double.class);
        Double topP = resolveParameter(in, OpenAIConstants.TOP_P, config.getTopP(), Double.class);
        Integer maxTokens = resolveParameter(in, OpenAIConstants.MAX_TOKENS, config.getMaxTokens(), Integer.class);
        Boolean streaming = resolveParameter(in, OpenAIConstants.STREAMING, config.isStreaming(), Boolean.class);
        String outputClass = resolveParameter(in, OpenAIConstants.OUTPUT_CLASS, config.getOutputClass(), String.class);
        String jsonSchema = resolveParameter(in, OpenAIConstants.JSON_SCHEMA, config.getJsonSchema(), String.class);

        List<ChatCompletionMessageParam> messages = buildMessages(exchange, config);

        ChatCompletionCreateParams.Builder paramsBuilder = ChatCompletionCreateParams.builder()
                .model(model);

        for (ChatCompletionMessageParam message : messages) {
            paramsBuilder.addMessage(message);
        }

        // Optional sampling/limit parameters are only sent when explicitly set
        if (temperature != null) {
            paramsBuilder.temperature(temperature);
        }
        if (topP != null) {
            paramsBuilder.topP(topP);
        }
        if (maxTokens != null) {
            paramsBuilder.maxTokens(maxTokens.longValue());
        }

        // Structured output handling: outputClass (schema derived from a Java class by
        // the SDK) takes precedence over a raw jsonSchema string
        if (ObjectHelper.isNotEmpty(outputClass)) {
            Class<?> responseClass = getEndpoint().getCamelContext().getClassResolver().resolveClass(outputClass);
            if (responseClass != null) {
                paramsBuilder.responseFormat(responseClass);
            }
        } else if (ObjectHelper.isNotEmpty(jsonSchema)) {
            // Build OpenAI JSON schema response format from provided schema string
            try {
                ResponseFormatJsonSchema.JsonSchema.Schema schema = buildSchemaFromJson(jsonSchema);
                ResponseFormatJsonSchema.JsonSchema jsonSchemaObj = ResponseFormatJsonSchema.JsonSchema.builder()
                        .name("camel_schema")
                        .schema(schema)
                        .build();
                paramsBuilder.responseFormat(
                        ResponseFormatJsonSchema.builder()
                                .jsonSchema(jsonSchemaObj)
                                .build());
            } catch (Exception e) {
                throw new IllegalArgumentException("Invalid JSON schema content provided in header/option", e);
            }
        }

        ChatCompletionCreateParams params = paramsBuilder.build();

        if (Boolean.TRUE.equals(streaming)) {
            processStreaming(exchange, params);
        } else {
            processNonStreaming(exchange, params, config);
        }
    }
+
+ private List<ChatCompletionMessageParam> buildMessages(Exchange exchange,
OpenAIConfiguration config)
+ throws Exception {
+ Message in = exchange.getIn();
+ List<ChatCompletionMessageParam> messages = new ArrayList<>();
+
+ // If a system message is configured and conversation memory is
enabled, reset
+ // history
+ if (ObjectHelper.isNotEmpty(config.getSystemMessage()) &&
config.isConversationMemory()) {
+ in.removeHeader(config.getConversationHistoryProperty());
+ }
+
+ String systemPrompt = in.getHeader(OpenAIConstants.SYSTEM_MESSAGE,
String.class);
+ String developerPrompt =
in.getHeader(OpenAIConstants.DEVELOPER_MESSAGE, String.class);
+ if (systemPrompt == null || systemPrompt.isEmpty() &&
ObjectHelper.isNotEmpty(config.getSystemMessage())) {
+ systemPrompt = config.getSystemMessage();
+ }
+ if (developerPrompt == null
+ || developerPrompt.isEmpty() &&
ObjectHelper.isNotEmpty(config.getDeveloperMessage())) {
+ developerPrompt = config.getDeveloperMessage();
+ }
+
+ // Prepend system and developer messages when configured
+ if (ObjectHelper.isNotEmpty(systemPrompt)) {
+ messages.add(createSystemMessage(systemPrompt));
+ }
+ if (ObjectHelper.isNotEmpty(developerPrompt)) {
+ messages.add(createDeveloperMessage(developerPrompt));
+ }
+
+ addConversationHistory(messages, in, config);
+
+ ChatCompletionMessageParam userMessage = buildUserMessage(in, config);
+ messages.add(userMessage);
+
+ return messages;
+ }
+
+ private void addConversationHistory(
+ List<ChatCompletionMessageParam> messages, Message in,
+ OpenAIConfiguration config) {
+ if (!config.isConversationMemory()) {
+ return;
+ }
+
+ @SuppressWarnings("unchecked")
+ List<ChatCompletionMessageParam> history =
in.getExchange().getProperty(
+ config.getConversationHistoryProperty(),
+ List.class);
+ if (history != null) {
+ messages.addAll(history);
+ }
+ }
+
+ private ChatCompletionMessageParam buildUserMessage(Message in,
OpenAIConfiguration config) throws Exception {
+ Object body = in.getBody();
+ String userPrompt = in.getHeader(OpenAIConstants.USER_MESSAGE,
String.class);
+ if (userPrompt == null || userPrompt.isEmpty() &&
ObjectHelper.isNotEmpty(config.getUserMessage())) {
+ userPrompt = config.getUserMessage();
+ }
+
+ if (body instanceof File) {
+ return buildFileMessage(in, userPrompt, config);
+ } else {
+ return buildTextMessage(in, userPrompt, config);
+ }
+ }
+
+ private ChatCompletionMessageParam buildTextMessage(Message in, String
userPrompt, OpenAIConfiguration config) {
+ String prompt = userPrompt != null ? userPrompt :
in.getBody(String.class);
+ if (prompt == null || prompt.isEmpty()) {
+ throw new IllegalArgumentException(
+ "Message body or user message configuration must contain
the prompt text");
+ }
+ return createTextMessage(prompt);
+ }
+
    /**
     * Builds a user message from a File body. Text files ({@code text/*}) are read as
     * the prompt when no explicit prompt was given; image files are embedded as a
     * base64 data URL alongside the mandatory text prompt.
     *
     * NOTE(review): Files.probeContentType is platform/installation dependent and may
     * return null even for plain text files, in which case this rejects the file —
     * confirm whether an extension-based fallback is desired.
     */
    private ChatCompletionMessageParam buildFileMessage(Message in, String userPrompt, OpenAIConfiguration config)
            throws Exception {
        File inputFile = in.getBody(File.class);
        Path path = inputFile.toPath();
        String mime = Files.probeContentType(path);

        if (mime != null && mime.startsWith("text/")) {
            // Handle text files - read content and use buildTextMessage logic
            String prompt = userPrompt;
            if (prompt == null || prompt.isEmpty()) {
                prompt = new String(Files.readAllBytes(path), StandardCharsets.UTF_8);
            }

            // reachable when the file itself is empty
            if (prompt == null || prompt.isEmpty()) {
                throw new IllegalArgumentException(
                        "File content or user message configuration must contain the prompt text");
            }
            return createTextMessage(prompt);
        } else if (mime != null && mime.startsWith("image/")) {
            // Handle image files - require user prompt and combine with image
            if (userPrompt == null || userPrompt.isEmpty()) {
                throw new IllegalArgumentException("User message configuration must be set when using image File body");
            }

            ChatCompletionContentPart imageContentPart = createImageContentPart(inputFile, mime);
            ChatCompletionContentPart textContentPart = createTextContentPart(userPrompt);

            // multi-part user message: text first, then the embedded image
            return ChatCompletionMessageParam.ofUser(
                    ChatCompletionUserMessageParam.builder()
                            .content(ChatCompletionUserMessageParam.Content.ofArrayOfContentParts(
                                    List.of(textContentPart, imageContentPart)))
                            .build());
        } else {
            throw new IllegalArgumentException("Only text and image files are supported");
        }
    }
+
+ private ChatCompletionMessageParam createTextMessage(String prompt) {
+ return ChatCompletionMessageParam.ofUser(
+ ChatCompletionUserMessageParam.builder()
+
.content(ChatCompletionUserMessageParam.Content.ofText(prompt))
+ .build());
+ }
+
+ private ChatCompletionMessageParam createSystemMessage(String text) {
+ return ChatCompletionMessageParam.ofSystem(
+ ChatCompletionSystemMessageParam.builder()
+
.content(ChatCompletionSystemMessageParam.Content.ofText(text))
+ .build());
+ }
+
+ private ChatCompletionMessageParam createDeveloperMessage(String text) {
+ return ChatCompletionMessageParam.ofDeveloper(
+ ChatCompletionDeveloperMessageParam.builder()
+
.content(ChatCompletionDeveloperMessageParam.Content.ofText(text))
+ .build());
+ }
+
+ private ChatCompletionContentPart createImageContentPart(File inputFile,
String mime) throws Exception {
+ Path path = inputFile.toPath();
+ byte[] img = Files.readAllBytes(path);
+ String dataUrl = "data:" + mime + ";base64," +
Base64.getEncoder().encodeToString(img);
+
+ return ChatCompletionContentPart.ofImageUrl(
+ ChatCompletionContentPartImage.builder()
+
.imageUrl(ChatCompletionContentPartImage.ImageUrl.builder()
+ .url(dataUrl)
+ .build())
+ .build());
+ }
+
+ private ChatCompletionContentPart createTextContentPart(String text) {
+ return ChatCompletionContentPart.ofText(
+ ChatCompletionContentPartText.builder()
+ .text(text)
+ .build());
+ }
+
+ private void processNonStreaming(Exchange exchange,
ChatCompletionCreateParams params, OpenAIConfiguration config)
+ throws Exception {
+ ChatCompletion response =
getEndpoint().getClient().chat().completions().create(params);
+ if (config.isStoreFullResponse()) {
+ exchange.setProperty(OpenAIConstants.RESPONSE, response);
+ }
+
+ // if finish reason is tool_calls, set the body to the tool calls
+ if
(response.choices().get(0).finishReason().equals(ChatCompletion.Choice.FinishReason.TOOL_CALLS))
{
+
exchange.getMessage().setBody(response.choices().get(0).message().toolCalls());
+ } else {
+ // Extract response content
+ String content =
response.choices().get(0).message().content().orElse("");
+ exchange.getMessage().setBody(content);
+ }
+ setResponseHeaders(exchange.getMessage(), response);
+
+ // Update conversation history if enabled
+ updateConversationHistory(exchange, params, response);
+ }
+
    /**
     * Starts a streaming completion and exposes the chunk stream to the route as an
     * Iterator so streaming EIPs can consume it lazily; the underlying HTTP response
     * is closed once the exchange completes.
     */
    private void processStreaming(Exchange exchange, ChatCompletionCreateParams params) {
        StreamResponse<ChatCompletionChunk> streamResponse = getEndpoint().getClient().chat().completions()
                .createStreaming(params);

        // hand Camel an Iterator for streaming EIPs (split, recipientList, etc.)
        Iterator<ChatCompletionChunk> it = streamResponse.stream().iterator();
        exchange.getMessage().setBody(it);

        // ensure the response is closed after the Exchange completes (success or failure)
        exchange.getUnitOfWork().addSynchronization(new Synchronization() {
            @Override
            public void onComplete(Exchange e) {
                safeClose();
            }

            @Override
            public void onFailure(Exchange e) {
                safeClose();
            }

            private void safeClose() {
                try {
                    streamResponse.close();
                } catch (Exception ignore) {
                    // best-effort close; nothing actionable if it fails
                }
            }
        });

    }
+
+ private void setResponseHeaders(Message message, ChatCompletion response) {
+ message.setHeader(OpenAIConstants.RESPONSE_ID, response.id());
+ message.setHeader(OpenAIConstants.RESPONSE_MODEL, response.model());
+
+ if (!response.choices().isEmpty()) {
+ ChatCompletion.Choice choice = response.choices().get(0);
+ message.setHeader(OpenAIConstants.FINISH_REASON,
choice.finishReason().toString());
+ }
+
+ if (response.usage().isPresent()) {
+ CompletionUsage usage = response.usage().get();
+ message.setHeader(OpenAIConstants.PROMPT_TOKENS,
usage.promptTokens());
+ message.setHeader(OpenAIConstants.COMPLETION_TOKENS,
usage.completionTokens());
+ message.setHeader(OpenAIConstants.TOTAL_TOKENS,
usage.totalTokens());
+ }
+ }
+
+ private void updateConversationHistory(
+ Exchange exchange, ChatCompletionCreateParams params,
+ ChatCompletion response) {
+ OpenAIConfiguration config = getEndpoint().getConfiguration();
+ if (!config.isConversationMemory()) {
+ return;
+ }
+
+ @SuppressWarnings("unchecked")
+ List<ChatCompletionMessageParam> history = exchange.getProperty(
+ config.getConversationHistoryProperty(),
+ List.class);
+
+ if (history == null) {
+ history = new ArrayList<>();
+ }
+
+ // Add assistant response to history
+ String assistantContent =
response.choices().get(0).message().content().orElse("");
+ ChatCompletionMessageParam assistantMessage =
ChatCompletionMessageParam.ofAssistant(
+ ChatCompletionAssistantMessageParam.builder()
+
.content(ChatCompletionAssistantMessageParam.Content.ofText(assistantContent))
+ .build());
+
+ history.add(assistantMessage);
+ exchange.setProperty(config.getConversationHistoryProperty(), history);
+ }
+
+ private ResponseFormatJsonSchema.JsonSchema.Schema
buildSchemaFromJson(String jsonSchemaString) throws Exception {
+ @SuppressWarnings("unchecked")
+ java.util.Map<String, Object> root =
OBJECT_MAPPER.readValue(jsonSchemaString, java.util.Map.class);
+ if (root == null) {
+ throw new IllegalArgumentException("JSON schema string parsed to
null");
+ }
+ if (!(root instanceof java.util.Map)) {
+ throw new IllegalArgumentException("JSON schema must be a JSON
object at the root");
+ }
+ ResponseFormatJsonSchema.JsonSchema.Schema.Builder sb =
ResponseFormatJsonSchema.JsonSchema.Schema.builder();
+ for (java.util.Map.Entry<String, Object> e : root.entrySet()) {
+ sb.putAdditionalProperty(e.getKey(), JsonValue.from(e.getValue()));
+ }
+ return sb.build();
+ }
+
+ private <T> T resolveParameter(Message message, String headerName, T
defaultValue, Class<T> type) {
+ T headerValue = message.getHeader(headerName, type);
+ return ObjectHelper.isNotEmpty(headerValue) ? headerValue :
defaultValue;
+ }
+
+}
diff --git
a/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/OpenAIProducerMockTest.java
b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/OpenAIProducerMockTest.java
new file mode 100644
index 000000000000..2054c6ebb6c7
--- /dev/null
+++
b/components/camel-ai/camel-openai/src/test/java/org/apache/camel/component/openai/OpenAIProducerMockTest.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.camel.component.openai;
+
+import org.apache.camel.Exchange;
+import org.apache.camel.builder.RouteBuilder;
+import org.apache.camel.test.infra.openai.mock.OpenAIMock;
+import org.apache.camel.test.junit5.CamelTestSupport;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.extension.RegisterExtension;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
/**
 * Producer tests against the camel-test-infra OpenAI mock server; no real API calls.
 */
public class OpenAIProducerMockTest extends CamelTestSupport {

    // Mock server: canned replies keyed on the incoming prompt text
    @RegisterExtension
    public OpenAIMock openAIMock = new OpenAIMock().builder()
            .when("hello")
            .replyWith("Hi from mock")
            .end()
            .when("json please")
            .replyWith("{\"ok\":true}")
            .end()
            .build();

    @Override
    protected RouteBuilder createRouteBuilder() {
        return new RouteBuilder() {
            @Override
            public void configure() {
                // Basic chat route using the mock server
                from("direct:chat")
                        .to("openai:chat-completion?model=gpt-5&apiKey=dummy&baseUrl=" + openAIMock.getBaseUrl()
                            + "/v1");

                // Streaming chat route using the mock server
                from("direct:chat-stream")
                        .to("openai:chat-completion?model=gpt-5&apiKey=dummy&streaming=true&baseUrl="
                            + openAIMock.getBaseUrl() + "/v1");
            }
        };
    }

    @Test
    void basicChatReturnsMockedResponse() {
        Exchange result = template.request("direct:chat", e -> e.getIn().setBody("hello"));
        // body carries the mocked completion text; response metadata headers are set too
        assertEquals("Hi from mock", result.getMessage().getBody(String.class));
        assertNotNull(result.getMessage().getHeader(OpenAIConstants.RESPONSE_ID));
        assertEquals("openai-mock", result.getMessage().getHeader(OpenAIConstants.RESPONSE_MODEL));
    }

    @Test
    void jsonSchemaHeaderParsesJsonContent() {
        // supplying a JSON schema via header must not break the request
        Exchange result = template.request("direct:chat", e -> {
            e.getIn().setBody("json please");
            e.getIn().setHeader(OpenAIConstants.JSON_SCHEMA,
                    "{\"type\":\"object\",\"properties\":{\"ok\":{\"type\":\"boolean\"}}}");
        });
        String body = result.getMessage().getBody(String.class);
        assertNotNull(body);
        assertEquals("{\"ok\":true}", body);
    }

    @Test
    void streamingChatReturnsIteratorOfChunks() {
        // streaming mode hands back an Iterator of chunks rather than a final string
        Exchange result = template.request("direct:chat-stream", e -> e.getIn().setBody("hello"));

        Object body = result.getMessage().getBody();
        assertNotNull(body);
        assertTrue(body instanceof java.util.Iterator);
    }

}
diff --git a/components/camel-ai/pom.xml b/components/camel-ai/pom.xml
index 8c3637254bd2..0d148b2c289e 100644
--- a/components/camel-ai/pom.xml
+++ b/components/camel-ai/pom.xml
@@ -50,6 +50,7 @@
<module>camel-langchain4j-web-search</module>
<module>camel-milvus</module>
<module>camel-neo4j</module>
+ <module>camel-openai</module>
<module>camel-pinecone</module>
<module>camel-qdrant</module>
<module>camel-tensorflow-serving</module>
diff --git a/docs/components/modules/ROOT/examples/json/openai.json
b/docs/components/modules/ROOT/examples/json/openai.json
new file mode 120000
index 000000000000..38fb0a98ee4e
--- /dev/null
+++ b/docs/components/modules/ROOT/examples/json/openai.json
@@ -0,0 +1 @@
+../../../../../../components/camel-ai/camel-openai/src/generated/resources/META-INF/org/apache/camel/component/openai/openai.json
\ No newline at end of file
diff --git a/docs/components/modules/ROOT/nav.adoc
b/docs/components/modules/ROOT/nav.adoc
index a34a017b40fd..6f98ce0cbb8c 100644
--- a/docs/components/modules/ROOT/nav.adoc
+++ b/docs/components/modules/ROOT/nav.adoc
@@ -17,6 +17,7 @@
*** xref:langchain4j-web-search-component.adoc[LangChain4j Web Search]
*** xref:milvus-component.adoc[Milvus]
*** xref:neo4j-component.adoc[Neo4j]
+*** xref:openai-component.adoc[OpenAI]
*** xref:pinecone-component.adoc[Pinecone]
*** xref:qdrant-component.adoc[Qdrant]
*** xref:tensorflow-serving-component.adoc[TensorFlow Serving]
diff --git a/docs/components/modules/ROOT/pages/openai-component.adoc
b/docs/components/modules/ROOT/pages/openai-component.adoc
new file mode 120000
index 000000000000..83cfe033c9c1
--- /dev/null
+++ b/docs/components/modules/ROOT/pages/openai-component.adoc
@@ -0,0 +1 @@
+../../../../../components/camel-ai/camel-openai/src/main/docs/openai-component.adoc
\ No newline at end of file
diff --git a/parent/pom.xml b/parent/pom.xml
index 0e4ca6a7210b..a0d2987ed32a 100644
--- a/parent/pom.xml
+++ b/parent/pom.xml
@@ -416,6 +416,7 @@
<olingo2-version>2.0.13</olingo2-version>
<olingo4-version>5.0.0</olingo4-version>
<ognl-version>3.4.8</ognl-version>
+ <openai-java-version>4.7.1</openai-java-version>
<openapi-generator-version>7.17.0</openapi-generator-version>
<openjpa-version>4.1.1</openjpa-version>
<opensearch-rest-client-version>3.3.2</opensearch-rest-client-version>
@@ -2141,6 +2142,11 @@
<artifactId>camel-olingo4-api</artifactId>
<version>${project.version}</version>
</dependency>
+ <dependency>
+ <groupId>org.apache.camel</groupId>
+ <artifactId>camel-openai</artifactId>
+ <version>${project.version}</version>
+ </dependency>
<dependency>
<groupId>org.apache.camel</groupId>
<artifactId>camel-openapi-java</artifactId>
@@ -3339,6 +3345,11 @@
</exclusion>
</exclusions>
</dependency>
+ <dependency>
+ <groupId>com.openai</groupId>
+ <artifactId>openai-java</artifactId>
+ <version>${openai-java-version}</version>
+ </dependency>
<dependency>
<groupId>commons-codec</groupId>