xintongsong commented on code in PR #472:
URL: https://github.com/apache/flink-agents/pull/472#discussion_r2726439234


##########
python/flink_agents/api/resource.py:
##########
@@ -211,55 +211,104 @@ def get_resource_class(module_path: str, class_name: 
str) -> Type[Resource]:
     module = importlib.import_module(module_path)
     return getattr(module, class_name)
 
+class ResourceName:
+    """Hierarchical resource class names for pointing to a resource
+    implementation in ResourceDescriptor.
 
-class Constant:
-    """Constant strings for pointing a built-in resource implementation."""
+    Structure:
+        - Python implementation: ResourceType.PROVIDER_RESOURCEKIND
+        - Java implementation: ResourceType.Java.PROVIDER_RESOURCEKIND
 
-    # Built-in ChatModel
-    # java wrapper
-    JAVA_CHAT_MODEL_CONNECTION = (
-        "flink_agents.api.chat_models.java_chat_model.JavaChatModelConnection"
-    )
-    JAVA_CHAT_MODEL_SETUP = (
-        "flink_agents.api.chat_models.java_chat_model.JavaChatModelSetup"
-    )
-    # ollama
-    OLLAMA_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection"
-    OLLAMA_CHAT_MODEL_SETUP = (
-        
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup"
-    )
-    # anthropic
-    ANTHROPIC_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection"
-    ANTHROPIC_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup"
-    # Azure
-    TONGYI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection"
-    TONGYI_CHAT_MODEL_SETUP = (
-        
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup"
-    )
-    # OpenAI
-    OPENAI_CHAT_MODEL_CONNECTION = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection"
-    OPENAI_CHAT_MODEL_SETUP = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup"
-
-    # Built-in EmbeddingModel
-    # java wrapper
-    JAVA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelConnection"
-    JAVA_EMBEDDING_MODEL_SETUP = (
-        
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelSetup"
-    )
-    # ollama
-    OLLAMA_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection"
-    OLLAMA_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup"
-
-    # OpenAI
-    OPENAI_EMBEDDING_MODEL_CONNECTION = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection"
-    OPENAI_EMBEDDING_MODEL_SETUP = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup"
-
-    # Built-in VectorStore
-    # java wrapper
-    JAVA_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaVectorStore"
-    JAVA_COLLECTION_MANAGEABLE_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaCollectionManageableVectorStore"
-    # chroma
-    CHROMA_VECTOR_STORE = 
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore"
-
-    # MCP
-    MCP_SERVER = "flink_agents.integrations.mcp.mcp.MCPServer"
+    Example usage:
+        # Python implementation
+        ResourceName.ChatModel.OLLAMA_CONNECTION
+        ResourceName.ChatModel.OPENAI_SETUP
+
+        # Java implementation
+        ResourceName.ChatModel.Java.OLLAMA_CONNECTION
+    """
+
+    class ChatModel:
+        """ChatModel resource names."""
+
+        # Anthropic
+        ANTHROPIC_CONNECTION = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection"
+        ANTHROPIC_SETUP = 
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup"
+
+        # Ollama
+        OLLAMA_CONNECTION = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection"
+        OLLAMA_SETUP = 
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup"
+
+        # OpenAI
+        OPENAI_CONNECTION = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection"
+        OPENAI_SETUP = 
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup"
+
+        # Tongyi
+        TONGYI_CONNECTION = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection"
+        TONGYI_SETUP = 
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup"
+
+        # Java Wrapper
+        JAVA_WRAPPER_CONNECTION = 
"flink_agents.api.chat_models.java_chat_model.JavaChatModelConnection"
+        JAVA_WRAPPER_SETUP = 
"flink_agents.api.chat_models.java_chat_model.JavaChatModelSetup"
+
+        class Java:
+            """Java implementations of ChatModel."""
+
+            # Anthropic
+            ANTHROPIC_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelConnection"
+            ANTHROPIC_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelSetup"
+
+            # Azure
+            AZURE_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelConnection"
+            AZURE_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelSetup"
+
+            # Ollama
+            OLLAMA_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection"
+            OLLAMA_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup"
+
+            # OpenAI
+            OPENAI_CONNECTION = 
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelConnection"
+            OPENAI_SETUP = 
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelSetup"
+
+    class EmbeddingModel:
+        """EmbeddingModel resource names."""
+
+        # Ollama
+        OLLAMA_CONNECTION = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection"
+        OLLAMA_SETUP = 
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup"
+
+        # OpenAI
+        OPENAI_CONNECTION = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection"
+        OPENAI_SETUP = 
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup"
+
+        # Java Wrapper
+        JAVA_WRAPPER_CONNECTION = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelConnection"
+        JAVA_WRAPPER_SETUP = 
"flink_agents.api.embedding_models.java_embedding_model.JavaEmbeddingModelSetup"
+
+        class Java:
+            """Java implementations of EmbeddingModel."""
+
+            # Ollama
+            OLLAMA_CONNECTION = 
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection"
+            OLLAMA_SETUP = 
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup"
+
+    class VectorStore:
+        """VectorStore resource names."""
+
+        # Chroma
+        CHROMA_VECTOR_STORE = 
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore"
+
+        # Java Wrapper
+        JAVA_WRAPPER_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaVectorStore"
+        JAVA_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE = 
"flink_agents.api.vector_stores.java_vector_store.JavaCollectionManageableVectorStore"
+
+        class Java:
+            """Java implementations of VectorStore."""
+
+            # Elasticsearch
+            ELASTICSEARCH_VECTOR_STORE = 
"org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore"
+
+    class MCP:

Review Comment:
   `ResourceName.MCP.SERVER` is unnecessary. `ResourceName.MCP_SERVER` should 
be good enough.



##########
api/src/main/java/org/apache/flink/agents/api/resource/ResourceName.java:
##########
@@ -0,0 +1,183 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.flink.agents.api.resource;
+
+/**
+ * Hierarchical resource class names for pointing to a resource implementation
+ * in {@link ResourceDescriptor}.
+ *
+ * <p>Structure:
+ *
+ * <ul>
+ *   <li>Java implementation: ResourceType.PROVIDER_RESOURCEKIND
+ *   <li>Python implementation: ResourceType.Python.PROVIDER_RESOURCEKIND
+ * </ul>
+ *
+ * <p>Example usage:
+ *
+ * <pre>{@code
+ * // Java implementation
+ * ResourceName.ChatModel.OLLAMA_CONNECTION
+ * ResourceName.ChatModel.OPENAI_SETUP
+ *
+ * // Python implementation
+ * ResourceName.ChatModel.Python.OLLAMA_CONNECTION
+ * }</pre>
+ */
+public final class ResourceName {
+
+    private ResourceName() {}
+
+    // ==================== ChatModel ====================
+    public static final class ChatModel {
+
+        // Anthropic
+        public static final String ANTHROPIC_CONNECTION =
+                
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelConnection";
+        public static final String ANTHROPIC_SETUP =
+                
"org.apache.flink.agents.integrations.chatmodels.anthropic.AnthropicChatModelSetup";
+
+        // Azure
+        public static final String AZURE_CONNECTION =
+                
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelConnection";
+        public static final String AZURE_SETUP =
+                
"org.apache.flink.agents.integrations.chatmodels.anthropic.AzureAIChatModelSetup";
+
+        // Ollama
+        public static final String OLLAMA_CONNECTION =
+                
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelConnection";
+        public static final String OLLAMA_SETUP =
+                
"org.apache.flink.agents.integrations.chatmodels.ollama.OllamaChatModelSetup";
+
+        // OpenAI
+        public static final String OPENAI_CONNECTION =
+                
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelConnection";
+        public static final String OPENAI_SETUP =
+                
"org.apache.flink.agents.integrations.chatmodels.openai.OpenAIChatModelSetup";
+
+        // Python Wrapper
+        public static final String PYTHON_WRAPPER_CONNECTION =
+                
"org.apache.flink.agents.api.chat.model.python.PythonChatModelConnection";
+        public static final String PYTHON_WRAPPER_SETUP =
+                
"org.apache.flink.agents.api.chat.model.python.PythonChatModelSetup";
+
+        /** Python implementations of ChatModel. */
+        public static final class Python {
+
+            // Anthropic
+            public static final String ANTHROPIC_CONNECTION =
+                    
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelConnection";
+            public static final String ANTHROPIC_SETUP =
+                    
"flink_agents.integrations.chat_models.anthropic.anthropic_chat_model.AnthropicChatModelSetup";
+
+            // Ollama
+            public static final String OLLAMA_CONNECTION =
+                    
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelConnection";
+            public static final String OLLAMA_SETUP =
+                    
"flink_agents.integrations.chat_models.ollama_chat_model.OllamaChatModelSetup";
+
+            // OpenAI
+            public static final String OPENAI_CONNECTION =
+                    
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelConnection";
+            public static final String OPENAI_SETUP =
+                    
"flink_agents.integrations.chat_models.openai.openai_chat_model.OpenAIChatModelSetup";
+
+            // Tongyi
+            public static final String TONGYI_CONNECTION =
+                    
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelConnection";
+            public static final String TONGYI_SETUP =
+                    
"flink_agents.integrations.chat_models.tongyi_chat_model.TongyiChatModelSetup";
+
+            private Python() {}
+        }
+
+        private ChatModel() {}
+    }
+
+    // ==================== EmbeddingModel ====================
+    public static final class EmbeddingModel {
+
+        // Ollama
+        public static final String OLLAMA_CONNECTION =
+                
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelConnection";
+        public static final String OLLAMA_SETUP =
+                
"org.apache.flink.agents.integrations.embeddingmodels.ollama.OllamaEmbeddingModelSetup";
+
+        // Python Wrapper
+        public static final String PYTHON_WRAPPER_CONNECTION =
+                
"org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelConnection";
+        public static final String PYTHON_WRAPPER_SETUP =
+                
"org.apache.flink.agents.api.embedding.model.python.PythonEmbeddingModelSetup";
+
+        /** Python implementations of EmbeddingModel. */
+        public static final class Python {
+
+            // Ollama
+            public static final String OLLAMA_CONNECTION =
+                    
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelConnection";
+            public static final String OLLAMA_SETUP =
+                    
"flink_agents.integrations.embedding_models.local.ollama_embedding_model.OllamaEmbeddingModelSetup";
+
+            // OpenAI
+            public static final String OPENAI_CONNECTION =
+                    
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelConnection";
+            public static final String OPENAI_SETUP =
+                    
"flink_agents.integrations.embedding_models.openai_embedding_model.OpenAIEmbeddingModelSetup";
+
+            private Python() {}
+        }
+
+        private EmbeddingModel() {}
+    }
+
+    // ==================== VectorStore ====================
+    public static final class VectorStore {
+
+        // Elasticsearch
+        public static final String ELASTICSEARCH_VECTOR_STORE =
+                
"org.apache.flink.agents.integrations.vectorstores.elasticsearch.ElasticsearchVectorStore";
+
+        // Python Wrapper
+        public static final String PYTHON_WRAPPER_VECTOR_STORE =
+                
"org.apache.flink.agents.api.vectorstores.python.PythonVectorStore";
+
+        public static final String 
PYTHON_WRAPPER_COLLECTION_MANAGEABLE_VECTOR_STORE =
+                
"org.apache.flink.agents.api.vectorstores.python.PythonCollectionManageableVectorStore";
+
+        /** Python implementations of VectorStore. */
+        public static final class Python {
+
+            // Chroma
+            public static final String CHROMA_VECTOR_STORE =
+                    
"flink_agents.integrations.vector_stores.chroma.chroma_vector_store.ChromaVectorStore";
+
+            private Python() {}
+        }
+
+        private VectorStore() {}
+    }
+
+    // ==================== MCP ====================
+    public static final class MCP {
+
+        public static final String SERVER = "DECIDE_IN_RUNTIME_MCPServer";

Review Comment:
   ditto



-- 
This is an automated message from the Apache Git Service.
To respond to the message, please log on to GitHub and use the
URL above to go to the specific comment.

To unsubscribe, e-mail: [email protected]

For queries about this service, please contact Infrastructure at:
[email protected]

Reply via email to