alnzng commented on code in PR #478:
URL: https://github.com/apache/flink-agents/pull/478#discussion_r2728471004


##########
python/flink_agents/integrations/chat_models/azure/azure_openai_chat_model.py:
##########
@@ -0,0 +1,260 @@
+################################################################################
+#  Licensed to the Apache Software Foundation (ASF) under one
+#  or more contributor license agreements.  See the NOTICE file
+#  distributed with this work for additional information
+#  regarding copyright ownership.  The ASF licenses this file
+#  to you under the Apache License, Version 2.0 (the
+#  "License"); you may not use this file except in compliance
+#  with the License.  You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+#  Unless required by applicable law or agreed to in writing, software
+#  distributed under the License is distributed on an "AS IS" BASIS,
+#  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#  See the License for the specific language governing permissions and
+# limitations under the License.
+#################################################################################
+from typing import Any, Dict, List, Sequence
+
+from openai import NOT_GIVEN, AzureOpenAI
+from pydantic import Field, PrivateAttr
+
+from flink_agents.api.chat_message import ChatMessage
+from flink_agents.api.chat_models.chat_model import (
+    BaseChatModelConnection,
+    BaseChatModelSetup,
+)
+from flink_agents.api.tools.tool import Tool
+from flink_agents.integrations.chat_models.chat_model_utils import to_openai_tool
+from flink_agents.integrations.chat_models.openai.openai_utils import (
+    convert_from_openai_message,
+    convert_to_openai_messages,
+)
+
+
+class AzureOpenAIChatModelConnection(BaseChatModelConnection):
+    """The connection to the Azure OpenAI LLM.
+
+    Attributes:
+    ----------
+    api_key : str
+        The Azure OpenAI API key.
+    api_version : str
+        Azure OpenAI REST API version to use.
+        See more: https://learn.microsoft.com/en-us/azure/ai-services/openai/reference#rest-api-versioning
+    azure_endpoint : str
+        Supported Azure OpenAI endpoints. Example: https://{your-resource-name}.openai.azure.com
+    timeout : float
+        The number of seconds to wait for an API call before it times out.
+    max_retries : int
+        The number of times to retry the API call upon failure.
+    """
+
+    api_key: str = Field(default=None, description="The Azure OpenAI API key.")
+    api_version: str = Field(
+        default=None,
+        description="Azure OpenAI REST API version to use.",
+    )
+    azure_endpoint: str = Field(
+        default=None,
+        description="Supported Azure OpenAI endpoints. Example: 
https://{your-resource-name}.openai.azure.com";
+    )
+    timeout: float = Field(
+        default=60.0,
+        description="The number of seconds to wait for an API call before it 
times out.",
+        ge=0,
+    )
+    max_retries: int = Field(
+        default=3,
+        description="The number of times to retry the API call upon failure.",
+        ge=0,
+    )
+
+    def __init__(
+            self,
+            *,
+            api_key: str | None = None,
+            api_version: str | None = None,
+            azure_endpoint: str | None = None,
+            timeout: float = 60.0,
+            max_retries: int = 3,
+            **kwargs: Any,
+    ) -> None:
+        """Init method."""
+        super().__init__(
+            api_key=api_key,
+            api_version=api_version,
+            azure_endpoint=azure_endpoint,
+            timeout=timeout,
+            max_retries=max_retries,
+            **kwargs,
+        )
+
+    _client: AzureOpenAI | None = PrivateAttr(default=None)
+
+    @property
+    def client(self) -> AzureOpenAI:
+        """Get Azure OpenAI client."""
+        self._client = AzureOpenAI(
+            azure_endpoint=self.azure_endpoint,
+            api_key=self.api_key,
+            api_version=self.api_version,
+            timeout=self.timeout,
+            max_retries=self.max_retries,
+        )
+        if self._client is None:
+            pass
+        return self._client
+
+    def chat(self, messages: Sequence[ChatMessage], tools: List[Tool] | None = None, **kwargs: Any) -> ChatMessage:
+        """Direct communication with model service for chat conversation.
+
+        Parameters
+        ----------
+        messages : Sequence[ChatMessage]
+            Input message sequence
+        tools : Optional[List[Tool]]
+            List of tools that can be called by the model
+        **kwargs : Any
+            Additional parameters passed to the model service (e.g., temperature,
+            max_tokens, etc.)
+
+        Returns:
+        -------
+        ChatMessage
+            Model response message
+        """
+        tool_specs = None
+        if tools is not None:
+            tool_specs = [to_openai_tool(metadata=tool.metadata) for tool in tools]
+
+        # Extract azure_deployment and model from kwargs for special handling
+        azure_deployment = kwargs.pop("azure_deployment", "")
+        if not azure_deployment:
+            msg = "azure_deployment is required for Azure OpenAI API calls"
+            raise ValueError(msg)
+        model_name = kwargs.pop("model", None)
+
+        response = self.client.chat.completions.create(
+            # Azure OpenAI APIs use Azure deployment name as the model parameter
+            model=azure_deployment,
+            messages=convert_to_openai_messages(messages),
+            tools=tool_specs or NOT_GIVEN,
+            **kwargs,
+        )
+
+        extra_args = {}
+        # Record token metrics only if model name is provided
+        if model_name and response.usage:
+            extra_args["model_name"] = model_name
+            extra_args["promptTokens"] = response.usage.prompt_tokens
+            extra_args["completionTokens"] = response.usage.completion_tokens
+
+        message = response.choices[0].message
+
+        return convert_from_openai_message(message, extra_args)
+
+
+class AzureOpenAIChatModelSetup(BaseChatModelSetup):
+    """The settings for the Azure OpenAI LLM.
+
+    Attributes:
+    ----------
+    connection : str
+        Name of the referenced connection. (Inherited from BaseChatModelSetup)
+    prompt : Optional[Union[Prompt, str]]
+        Prompt template or string for the model. (Inherited from BaseChatModelSetup)
+    tools : Optional[List[str]]
+        List of available tools to use in the chat. (Inherited from BaseChatModelSetup)
+    azure_deployment : str
+        Name of Azure OpenAI deployment to use.
+    model : Optional[str]

Review Comment:
   Sounds good @xintongsong @wenjin272 !
   
   Let me keep `azure_deployment` and add a new field `model` as an alias of `azure_deployment`, and use `model_of_deployement` for token usage tracing.
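   
   A rough sketch of what that aliasing could look like (illustrative only: this uses plain pydantic v2 instead of the real `BaseChatModelSetup`, and the class/validator names are made up for the example):
   
   ```python
   from pydantic import BaseModel, ConfigDict, Field, model_validator
   
   
   class DeploymentAliasSketch(BaseModel):
       """Illustrative only: keep `azure_deployment` and let `model` act as its alias."""
   
       # Disable pydantic's protected namespaces so a field name starting with
       # "model_" does not trigger a warning.
       model_config = ConfigDict(protected_namespaces=())
   
       azure_deployment: str | None = Field(
           default=None, description="Name of the Azure OpenAI deployment to use."
       )
       model: str | None = Field(
           default=None, description="Alias of azure_deployment."
       )
       model_of_deployement: str | None = Field(
           default=None,
           description="Underlying model of the deployment, used only for token usage tracing.",
       )
   
       @model_validator(mode="after")
       def _sync_alias(self) -> "DeploymentAliasSketch":
           # Accept either field and keep the two in sync; `azure_deployment`
           # stays the value sent to the Azure API as the `model` parameter.
           if self.azure_deployment is None:
               self.azure_deployment = self.model
           elif self.model is None:
               self.model = self.azure_deployment
           return self
   ```
   
   With that, passing either `model` or `azure_deployment` ends up with the same deployment name, while `model_of_deployement` stays optional and only feeds the token usage metrics.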


