This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git


The following commit(s) were added to refs/heads/main by this push:
     new 25ea18f  [integration][python] Anthropic Chat Model APIs Integration (#139)
25ea18f is described below

commit 25ea18f6d684c8f65ef991cd67ad290ccd531961
Author: Alan Z. <alazh...@linkedin.com>
AuthorDate: Fri Sep 5 00:03:06 2025 -0700

    [integration][python] Anthropic Chat Model APIs Integration (#139)
---
 python/flink_agents/api/events/tool_event.py       |   6 +-
 .../chat_models/anthropic/__init__.py}             |  33 ---
 .../chat_models/anthropic/anthropic_chat_model.py  | 257 +++++++++++++++++++++
 .../chat_models/anthropic/tests/__init__.py}       |  33 ---
 .../anthropic/tests/test_anthropic_chat_model.py   |  98 ++++++++
 .../flink_agents/plan/actions/chat_model_action.py |   7 +-
 python/pyproject.toml                              |   3 +-
 7 files changed, 368 insertions(+), 69 deletions(-)

diff --git a/python/flink_agents/api/events/tool_event.py b/python/flink_agents/api/events/tool_event.py
index a5e5c47..6bda919 100644
--- a/python/flink_agents/api/events/tool_event.py
+++ b/python/flink_agents/api/events/tool_event.py
@@ -15,7 +15,7 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 #################################################################################
-from typing import Any
+from typing import Any, Optional
 
 from flink_agents.api.events.event import Event
 
@@ -29,10 +29,14 @@ class ToolRequestEvent(Event):
         The name of the tool to be called.
     kwargs : dict
         The arguments passed to the tool.
+    external_id : Optional[str]
+        Optional identifier for storing original tool call IDs from external systems
+        (e.g., Anthropic tool_use_id).
     """
 
     tool: str
     kwargs: dict
+    external_id: Optional[str] = None
 
 
 class ToolResponseEvent(Event):
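
A quick illustrative sketch (not part of the patch) of how the new external_id field is meant to be used; the tool_use_id value is invented and the remaining Event base-class fields are assumed to have defaults:

    from flink_agents.api.events.tool_event import ToolRequestEvent, ToolResponseEvent

    # Hypothetical values: external_id carries Anthropic's tool_use_id so the
    # tool result can later be correlated with the original tool_use block.
    request = ToolRequestEvent(
        tool="add",
        kwargs={"a": 1, "b": 1},
        external_id="toolu_01ABC",
    )
    response = ToolResponseEvent(request=request, response=2)
    assert response.request.external_id == "toolu_01ABC"
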
diff --git a/python/flink_agents/api/events/tool_event.py b/python/flink_agents/integrations/chat_models/anthropic/__init__.py
similarity index 59%
copy from python/flink_agents/api/events/tool_event.py
copy to python/flink_agents/integrations/chat_models/anthropic/__init__.py
index a5e5c47..e154fad 100644
--- a/python/flink_agents/api/events/tool_event.py
+++ b/python/flink_agents/integrations/chat_models/anthropic/__init__.py
@@ -15,36 +15,3 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 #################################################################################
-from typing import Any
-
-from flink_agents.api.events.event import Event
-
-
-class ToolRequestEvent(Event):
-    """Event representing a tool call request.
-
-    Attributes:
-    ----------
-    tool : str
-        The name of the tool to be called.
-    kwargs : dict
-        The arguments passed to the tool.
-    """
-
-    tool: str
-    kwargs: dict
-
-
-class ToolResponseEvent(Event):
-    """Event representing a result from tool call.
-
-    Attributes:
-    ----------
-    request : ToolRequestEvent
-        The correspond request of the response.
-    response : Any
-        The response from the tool.
-    """
-
-    request: ToolRequestEvent
-    response: Any
diff --git a/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py b/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
new file mode 100644
index 0000000..061d213
--- /dev/null
+++ b/python/flink_agents/integrations/chat_models/anthropic/anthropic_chat_model.py
@@ -0,0 +1,257 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+#################################################################################
+import uuid
+from typing import Any, Dict, List, Optional, Sequence
+
+from anthropic import Anthropic
+from anthropic._types import NOT_GIVEN
+from anthropic.types import MessageParam, TextBlockParam, ToolParam
+from pydantic import Field, PrivateAttr
+
+from flink_agents.api.chat_message import ChatMessage, MessageRole
+from flink_agents.api.chat_models.chat_model import (
+    BaseChatModelConnection,
+    BaseChatModelSetup,
+)
+from flink_agents.api.tools.tool import BaseTool, ToolMetadata
+
+
+def to_anthropic_tool(*, metadata: ToolMetadata, skip_length_check: bool = False) -> ToolParam:
+    """Convert to Anthropic tool: https://docs.anthropic.com/en/api/messages#body-tools."""
+    if not skip_length_check and len(metadata.description) > 1024:
+        msg = (
+            "Tool description exceeds maximum length of 1024 characters. "
+            "Please shorten your description or move it to the prompt."
+        )
+        raise ValueError(msg)
+    return {
+        "name": metadata.name,
+        "description": metadata.description,
+        "input_schema": metadata.get_parameters_dict()
+    }
+
+
+def convert_to_anthropic_message(message: ChatMessage) -> MessageParam:
+    """Convert ChatMessage to Anthropic MessageParam format."""
+    if message.role == MessageRole.TOOL:
+        return {
+            "role": MessageRole.USER.value,
+            "content": [
+                {
+                    "type": "tool_result",
+                    "tool_use_id": message.extra_args.get("external_id"),
+                    "content": message.content,
+                }
+            ],
+        }
+    elif message.role == MessageRole.ASSISTANT:
+        # Use original Anthropic content blocks if available for context
+        anthropic_content_blocks = message.extra_args.get("anthropic_content_blocks")
+        content = anthropic_content_blocks if anthropic_content_blocks is not None else message.content
+        return {
+            "role": message.role.value,
+            "content": content,  # type: ignore
+        }
+    else:
+        return {
+            "role": message.role.value,
+            "content": message.content,
+        }
+
+
+def convert_to_anthropic_messages(messages: Sequence[ChatMessage]) -> List[MessageParam]:
+    """Convert user/assistant messages to Anthropic input messages.
+
+    See: https://docs.anthropic.com/en/api/messages#body-messages
+    """
+    return [convert_to_anthropic_message(message) for message in messages if
+            message.role in [MessageRole.USER, MessageRole.ASSISTANT, MessageRole.TOOL]]
+
+
+def convert_to_anthropic_system_prompts(messages: Sequence[ChatMessage]) -> List[TextBlockParam]:
+    """Convert system messages to Anthropic system prompts.
+
+    See: https://docs.anthropic.com/en/api/messages#body-system
+    """
+    system_messages = [message for message in messages if message.role == MessageRole.SYSTEM]
+    return [
+        TextBlockParam(
+            type="text",
+            text=message.content
+        )
+        for message in system_messages
+    ]
+
+
+class AnthropicChatModelConnection(BaseChatModelConnection):
+    """Manages the connection to the Anthropic AI models for chat interactions.
+
+    Attributes:
+    ----------
+    api_key : str
+        The Anthropic API key.
+    max_retries : int
+        The number of times to retry the API call upon failure.
+    timeout : float
+        The number of seconds to wait for an API call before it times out.
+    reuse_client : bool
+        Whether to reuse the Anthropic client between requests.
+    """
+
+    api_key: str = Field(default=None, description="The Anthropic API key.")
+
+    max_retries: int = Field(
+        default=3,
+        description="The number of times to retry the API call upon failure.",
+        ge=0,
+    )
+    timeout: float = Field(
+        default=60.0,
+        description="The number of seconds to wait for an API call before it 
times out.",
+        ge=0,
+    )
+
+    def __init__(
+            self,
+            api_key: Optional[str] = None,
+            max_retries: int = 3,
+            timeout: float = 60.0,
+            **kwargs: Any,
+    ) -> None:
+        """Initialize the Anthropic chat model connection."""
+        super().__init__(
+            api_key=api_key,
+            max_retries=max_retries,
+            timeout=timeout,
+            **kwargs,
+        )
+
+    _client: Optional[Anthropic] = PrivateAttr(default=None)
+
+    @property
+    def client(self) -> Anthropic:
+        """Get or create the Anthropic client instance."""
+        if self._client is None:
+            self._client = Anthropic(api_key=self.api_key, max_retries=self.max_retries, timeout=self.timeout)
+        return self._client
+
+    def chat(self, messages: Sequence[ChatMessage], tools: Optional[List[BaseTool]] = None,
+             **kwargs: Any) -> ChatMessage:
+        """Direct communication with Anthropic model service for chat conversation."""
+        anthropic_tools = None
+        if tools is not None:
+            anthropic_tools = [to_anthropic_tool(metadata=tool.metadata) for tool in tools]
+
+        anthropic_system = convert_to_anthropic_system_prompts(messages)
+        anthropic_messages = convert_to_anthropic_messages(messages)
+
+        message = self.client.messages.create(
+            messages=anthropic_messages,
+            tools=anthropic_tools or NOT_GIVEN,
+            system=anthropic_system or NOT_GIVEN,
+            **kwargs,
+        )
+
+        if message.stop_reason == "tool_use":
+            tool_calls = [
+                {
+                    "id": uuid.uuid4(),
+                    "type": "function",
+                    "function": {
+                        "name": content_block.name,
+                        "arguments": content_block.input,
+                    },
+                    "original_id": content_block.id,
+                }
+                for content_block in message.content
+                if content_block.type == 'tool_use'
+            ]
+
+            return ChatMessage(
+                role=MessageRole(message.role),
+                content=message.content[0].text,
+                tool_calls=tool_calls,
+                extra_args={
+                    "anthropic_content_blocks": message.content
+                }
+
+            )
+        else:
+            # TODO: handle other stop_reason values according to Anthropic API:
+            #  https://docs.anthropic.com/en/api/messages#response-stop-reason
+            return ChatMessage(
+                role=MessageRole(message.role),
+                content=message.content[0].text,
+            )
+
+
+DEFAULT_ANTHROPIC_MODEL = "claude-sonnet-4-20250514"
+DEFAULT_MAX_TOKENS = 1024
+DEFAULT_TEMPERATURE = 0.1
+
+
+class AnthropicChatModelSetup(BaseChatModelSetup):
+    """The settings for Anthropic Chat Model.
+
+    Attributes:
+    ----------
+    model : str
+        Specifies the Anthropic model to use. Defaults to claude-sonnet-4-20250514.
+    max_tokens: int
+        The maximum number of tokens to generate before stopping. Defaults to 1024.
+    temperature : float
+        Amount of randomness injected into the response.
+    """
+
+    model: str = Field(
+        default=DEFAULT_ANTHROPIC_MODEL, description="Specifies the Anthropic model to use. Defaults to "
+                                                     "claude-sonnet-4-20250514."
+    )
+    max_tokens: int = Field(
+        default=DEFAULT_MAX_TOKENS,
+        description="The maximum number of tokens to generate before stopping. 
Defaults to 1024.",
+        ge=1,
+    )
+    temperature: float = Field(
+        default=DEFAULT_TEMPERATURE,
+        description="Amount of randomness injected into the response. Defaults 
to 0.1",
+        ge=0.0,
+        le=1.0,
+    )
+
+    def __init__(
+            self,
+            connection: str,
+            model: str = DEFAULT_ANTHROPIC_MODEL,
+            max_tokens: int = DEFAULT_MAX_TOKENS,
+            temperature: float = DEFAULT_TEMPERATURE,
+            **kwargs: Any,
+    ) -> None:
+        """Init method."""
+        super().__init__(
+            connection=connection,
+            model=model,
+            max_tokens=max_tokens,
+            temperature=temperature,
+            **kwargs,
+        )
+
+    @property
+    def model_kwargs(self) -> Dict[str, Any]:
+        """Get model-specific keyword arguments."""
+        return {"model": self.model, "max_tokens": self.max_tokens, 
"temperature": self.temperature}
diff --git a/python/flink_agents/api/events/tool_event.py b/python/flink_agents/integrations/chat_models/anthropic/tests/__init__.py
similarity index 59%
copy from python/flink_agents/api/events/tool_event.py
copy to python/flink_agents/integrations/chat_models/anthropic/tests/__init__.py
index a5e5c47..e154fad 100644
--- a/python/flink_agents/api/events/tool_event.py
+++ b/python/flink_agents/integrations/chat_models/anthropic/tests/__init__.py
@@ -15,36 +15,3 @@
 #  See the License for the specific language governing permissions and
 # limitations under the License.
 #################################################################################
-from typing import Any
-
-from flink_agents.api.events.event import Event
-
-
-class ToolRequestEvent(Event):
-    """Event representing a tool call request.
-
-    Attributes:
-    ----------
-    tool : str
-        The name of the tool to be called.
-    kwargs : dict
-        The arguments passed to the tool.
-    """
-
-    tool: str
-    kwargs: dict
-
-
-class ToolResponseEvent(Event):
-    """Event representing a result from tool call.
-
-    Attributes:
-    ----------
-    request : ToolRequestEvent
-        The correspond request of the response.
-    response : Any
-        The response from the tool.
-    """
-
-    request: ToolRequestEvent
-    response: Any
diff --git a/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py b/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
new file mode 100644
index 0000000..45116ac
--- /dev/null
+++ b/python/flink_agents/integrations/chat_models/anthropic/tests/test_anthropic_chat_model.py
@@ -0,0 +1,98 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+#################################################################################
+import os
+
+import pytest
+
+from flink_agents.api.chat_message import ChatMessage, MessageRole
+from flink_agents.api.resource import Resource, ResourceType
+from flink_agents.integrations.chat_models.anthropic.anthropic_chat_model import (
+    AnthropicChatModelConnection,
+    AnthropicChatModelSetup,
+)
+from flink_agents.plan.tools.function_tool import from_callable
+
+test_model = os.environ.get("TEST_MODEL")
+api_key = os.environ.get("TEST_API_KEY")
+
+
+@pytest.mark.skipif(api_key is None, reason="TEST_API_KEY is not set")
+def test_anthropic_chat_model() -> None:  # noqa: D103
+    connection = AnthropicChatModelConnection(
+        name="anthropic_server", api_key=api_key
+    )
+
+    def get_resource(name: str, type: ResourceType) -> Resource:
+        if type == ResourceType.CHAT_MODEL_CONNECTION:
+            return connection
+        else:
+            return get_resource(name, ResourceType.TOOL)
+
+    chat_model = AnthropicChatModelSetup(
+        name="anthropic", model=test_model, connection="anthropic_server", 
get_resource=get_resource
+    )
+    response = chat_model.chat([ChatMessage(role=MessageRole.USER, content="Hello!")])
+    assert response is not None
+    assert str(response).strip() != ""
+
+
+def add(a: int, b: int) -> int:
+    """Calculate the sum of a and b.
+
+    Parameters
+    ----------
+    a : int
+        The first operand
+    b : int
+        The second operand
+
+    Returns:
+    -------
+    int:
+        The sum of a and b
+    """
+    return a + b
+
+
+@pytest.mark.skipif(api_key is None, reason="TEST_API_KEY is not set")
+def test_anthropic_chat_with_tools() -> None:  # noqa : D103
+    connection = AnthropicChatModelConnection(
+        name="anthropic_server", api_key=api_key
+    )
+
+    def get_resource(name: str, type: ResourceType) -> Resource:
+        if type == ResourceType.CHAT_MODEL_CONNECTION:
+            return connection
+        else:
+            return from_callable(name=name, func=add)
+
+    chat_model = AnthropicChatModelSetup(
+        name="anthropic",
+        model=test_model,
+        connection="anthropic_server",
+        tools=["add"],
+        get_resource=get_resource,
+    )
+    response = chat_model.chat(
+        [ChatMessage(role=MessageRole.USER, content="What is 1 + 1?")]
+    )
+    tool_calls = response.tool_calls
+    assert len(tool_calls) == 1
+    tool_call = tool_calls[0]
+    assert add(**tool_call["function"]["arguments"]) == 2
+    assert tool_call.get("original_id") is not None
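
For reference, the tool_calls entries asserted above follow the shape built in AnthropicChatModelConnection.chat(); the concrete values below are invented:

    # Illustrative shape only:
    # response.tool_calls[0] == {
    #     "id": UUID("..."),             # framework-generated via uuid.uuid4()
    #     "type": "function",
    #     "function": {"name": "add", "arguments": {"a": 1, "b": 1}},
    #     "original_id": "toolu_01ABC",  # Anthropic tool_use_id
    # }
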
diff --git a/python/flink_agents/plan/actions/chat_model_action.py b/python/flink_agents/plan/actions/chat_model_action.py
index 03ef48e..f51a0fb 100644
--- a/python/flink_agents/plan/actions/chat_model_action.py
+++ b/python/flink_agents/plan/actions/chat_model_action.py
@@ -57,6 +57,7 @@ def process_chat_request_or_tool_response(event: Event, ctx: RunnerContext) -> N
                         id=tool_call_id,
                         tool=tool_call["function"]["name"],
                         kwargs=tool_call["function"]["arguments"],
+                        external_id=tool_call.get("original_id"),
                     )
                 )
 
@@ -73,7 +74,11 @@ def process_chat_request_or_tool_response(event: Event, ctx: RunnerContext) -> N
                 # get the specific tool call context from short term memory
                 specific_tool_ctx = tool_context.pop(tool_call_id)
                 specific_tool_ctx.messages.append(
-                    ChatMessage(role=MessageRole.TOOL, content=str(event.response))
+                    ChatMessage(
+                        role=MessageRole.TOOL,
+                        content=str(event.response),
+                        extra_args={"external_id": event.request.external_id} 
if event.request.external_id else {}
+                    )
                 )
                 ctx.send_event(specific_tool_ctx)
                 # update short term memory to remove the specific tool call context
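
Tying this back to the converter added above: a TOOL-role message carrying external_id in extra_args becomes an Anthropic tool_result block, roughly as follows (values invented):

    from flink_agents.api.chat_message import ChatMessage, MessageRole
    from flink_agents.integrations.chat_models.anthropic.anthropic_chat_model import (
        convert_to_anthropic_message,
    )

    msg = ChatMessage(
        role=MessageRole.TOOL,
        content="2",
        extra_args={"external_id": "toolu_01ABC"},
    )
    convert_to_anthropic_message(msg)
    # -> {"role": "user",
    #     "content": [{"type": "tool_result", "tool_use_id": "toolu_01ABC", "content": "2"}]}
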
diff --git a/python/pyproject.toml b/python/pyproject.toml
index a586a00..528bb8d 100644
--- a/python/pyproject.toml
+++ b/python/pyproject.toml
@@ -50,7 +50,8 @@ dependencies = [
     #TODO: Seperate integration dependencies from project
     "ollama==0.4.8",
     "dashscope~=1.24.2",
-    "openai>=1.66.3"
+    "openai>=1.66.3",
+    "anthropic>=0.64.0",
 ]
 
 # Optional dependencies (dependency groups)
