This is an automated email from the ASF dual-hosted git repository.

xtsong pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/flink-agents.git

commit bbf61d010e52e9c21a9b132897832607474c6e47
Author: WenjinXie <[email protected]>
AuthorDate: Thu Jan 15 19:44:44 2026 +0800

    [doc] Update documents after using constant in resource descriptor.
---
 docs/content/docs/development/chat_models.md       | 32 +++++++--------
 docs/content/docs/development/embedding_models.md  | 12 +++---
 docs/content/docs/development/react_agent.md       |  4 +-
 docs/content/docs/development/vector_stores.md     | 32 +++++++--------
 docs/content/docs/development/workflow_agent.md    |  4 +-
 .../docs/get-started/quickstart/react_agent.md     | 45 +++++++++++-----------
 .../docs/get-started/quickstart/workflow_agent.md  |  6 +--
 7 files changed, 68 insertions(+), 67 deletions(-)

diff --git a/docs/content/docs/development/chat_models.md b/docs/content/docs/development/chat_models.md
index d9195aa5..d6659425 100644
--- a/docs/content/docs/development/chat_models.md
+++ b/docs/content/docs/development/chat_models.md
@@ -65,7 +65,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection,
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=30.0
         )
@@ -74,7 +74,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_connection",
             model="qwen3:8b",
             temperature=0.7
@@ -106,14 +106,14 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor ollamaConnection() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelConnection.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_CONNECTION)
                 .addInitialArgument("endpoint", "http://localhost:11434")
                 .build();
     }
 
     @ChatModelSetup
     public static ResourceDescriptor ollamaChatModel() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .build();
@@ -196,7 +196,7 @@ Azure AI is only supported in Java currently.
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor azureAIConnection() {
-        return ResourceDescriptor.Builder.newBuilder(AzureAIChatModelConnection.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.AZURE_CHAT_MODEL_CONNECTION)
                 .addInitialArgument("endpoint", "https://your-resource.inference.ai.azure.com")
                 .addInitialArgument("apiKey", "your-api-key-here")
                 .build();
@@ -204,7 +204,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor azureAIChatModel() {
-        return ResourceDescriptor.Builder.newBuilder(AzureAIChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.AZURE_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "azureAIConnection")
                 .addInitialArgument("model", "gpt-4o")
                 .build();
@@ -272,7 +272,7 @@ class MyAgent(Agent):
     @staticmethod
     def anthropic_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=AnthropicChatModelConnection,
+            clazz=Constant.ANTHROPIC_CHAT_MODEL_CONNECTION,
             api_key="your-api-key-here",  # Or set ANTHROPIC_API_KEY env var
             max_retries=3,
             timeout=60.0
@@ -282,7 +282,7 @@ class MyAgent(Agent):
     @staticmethod
     def anthropic_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=AnthropicChatModelSetup,
+            clazz=Constant.ANTHROPIC_CHAT_MODEL_SETUP,
             connection="anthropic_connection",
             model="claude-sonnet-4-20250514",
             max_tokens=2048,
@@ -383,7 +383,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaChatModelConnection,
+            clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=120.0
         )
@@ -392,7 +392,7 @@ class MyAgent(Agent):
     @staticmethod
     def my_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_connection",
             model="qwen3:8b",
             temperature=0.7,
@@ -410,7 +410,7 @@ class MyAgent(Agent):
 public class MyAgent extends Agent {
     @ChatModelConnection
     public static ResourceDescriptor ollamaConnection() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelConnection.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_CONNECTION)
                 .addInitialArgument("endpoint", "http://localhost:11434")
                 .addInitialArgument("requestTimeout", 120)
                 .build();
@@ -418,7 +418,7 @@ public class MyAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor ollamaChatModel() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .build();
@@ -495,7 +495,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIChatModelConnection,
+            clazz=Constant.OPENAI_CHAT_MODEL_CONNECTION,
             api_key="your-api-key-here",  # Or set OPENAI_API_KEY env var
             api_base_url="https://api.openai.com/v1";,
             max_retries=3,
@@ -506,7 +506,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIChatModelSetup,
+            clazz=Constant.OPENAI_CHAT_MODEL_SETUP,
             connection="openai_connection",
             model="gpt-4",
             temperature=0.7,
@@ -569,7 +569,7 @@ class MyAgent(Agent):
     @staticmethod
     def tongyi_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=TongyiChatModelConnection,
+            clazz=Constant.TONGYI_CHAT_MODEL_CONNECTION,
             api_key="your-api-key-here",  # Or set DASHSCOPE_API_KEY env var
             request_timeout=60.0
         )
@@ -578,7 +578,7 @@ class MyAgent(Agent):
     @staticmethod
     def tongyi_chat_model() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=TongyiChatModelSetup,
+            clazz=Constant.TONGYI_CHAT_MODEL_SETUP,
             connection="tongyi_connection",
             model="qwen-plus",
             temperature=0.7,
diff --git a/docs/content/docs/development/embedding_models.md b/docs/content/docs/development/embedding_models.md
index d24912c6..288d300d 100644
--- a/docs/content/docs/development/embedding_models.md
+++ b/docs/content/docs/development/embedding_models.md
@@ -69,7 +69,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelConnection,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
             api_key="your-api-key-here",
             base_url="https://api.openai.com/v1";,
             request_timeout=30.0
@@ -79,7 +79,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelSetup,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here"
         )
@@ -136,7 +136,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelConnection,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION,
             base_url="http://localhost:11434";,
             request_timeout=30.0
         )
@@ -145,7 +145,7 @@ class MyAgent(Agent):
     @staticmethod
     def ollama_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OllamaEmbeddingModelSetup,
+            clazz=Constant.OLLAMA_EMBEDDING_MODEL_SETUP,
             connection="ollama_connection",
             model="nomic-embed-text",
             truncate=True,
@@ -185,7 +185,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelConnection,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
             api_key="your-api-key-here",
             base_url="https://api.openai.com/v1";,
             request_timeout=30.0,
@@ -196,7 +196,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelSetup,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here",
             encoding_format="float"
diff --git a/docs/content/docs/development/react_agent.md b/docs/content/docs/development/react_agent.md
index a8de5072..c1dbfa5b 100644
--- a/docs/content/docs/development/react_agent.md
+++ b/docs/content/docs/development/react_agent.md
@@ -65,7 +65,7 @@ We use `ResourceDescriptor` to describe the chat model, includes chat model type
 {{< tab "Python" >}}
 ```python
 chat_model_descriptor = ResourceDescriptor(
-    clazz=OllamaChatModelSetup,
+    clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
     connection="my_ollama_connection",
     model="qwen3:8b",
     tools=["my_tool1, my_tool2"],
@@ -76,7 +76,7 @@ chat_model_descriptor = ResourceDescriptor(
 {{< tab "Java" >}}
 ```java
 ResourceDescriptor chatModelDescriptor =
-                ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+                ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
                         .addInitialArgument("connection", "myOllamaConnection")
                         .addInitialArgument("model", "qwen3:8b")
                         .addInitialArgument(
diff --git a/docs/content/docs/development/vector_stores.md b/docs/content/docs/development/vector_stores.md
index a86feeef..e628108d 100644
--- a/docs/content/docs/development/vector_stores.md
+++ b/docs/content/docs/development/vector_stores.md
@@ -147,7 +147,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelConnection,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
             api_key="your-api-key-here"
         )
 
@@ -155,7 +155,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelSetup,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
             connection="openai_connection",
             model="your-embedding-model-here"
         )
@@ -165,7 +165,7 @@ class MyAgent(Agent):
     @staticmethod
     def chroma_store() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=ChromaVectorStore,
+            clazz=Constant.CHROMA_VECTOR_STORE,
             embedding_model="openai_embedding",
             collection="my_chroma_store"
         )
@@ -199,22 +199,22 @@ public class MyAgent extends Agent {
 
     @EmbeddingModelConnection
     public static ResourceDescriptor embeddingConnection() {
-        return ResourceDescriptor.Builder.newBuilder(OpenAIEmbeddingModelConnection.class.getName())
-                .addInitialArgument("api_key", "your-api-key-here")
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_EMBEDDING_MODEL_CONNECTION)
+                .addInitialArgument("host", "http://localhost:11434")
                 .build();
     }
 
     @EmbeddingModelSetup
     public static ResourceDescriptor embeddingModel() {
-        return ResourceDescriptor.Builder.newBuilder(OpenAIEmbeddingModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_EMBEDDING_MODEL_SETUP)
                 .addInitialArgument("connection", "embeddingConnection")
-                .addInitialArgument("model", "text-embedding-3-small")
+                .addInitialArgument("model", "nomic-embed-text")
                 .build();
     }
 
     @VectorStore
     public static ResourceDescriptor vectorStore() {
-        return ResourceDescriptor.Builder.newBuilder(ElasticsearchVectorStore.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.ELASTICSEARCH_VECTOR_STORE)
                 .addInitialArgument("embedding_model", "embeddingModel")
                 .addInitialArgument("host", "http://localhost:9200")
                 .addInitialArgument("index", "my_documents")
@@ -293,7 +293,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_connection() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelConnection,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_CONNECTION,
             api_key="your-api-key-here"
         )
 
@@ -301,7 +301,7 @@ class MyAgent(Agent):
     @staticmethod
     def openai_embedding() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=OpenAIEmbeddingModelSetup,
+            clazz=Constant.OPENAI_EMBEDDING_MODEL_SETUP,
             connection="openai_connection",
           model="your-embedding-model-here"
         )
@@ -311,7 +311,7 @@ class MyAgent(Agent):
     @staticmethod
     def chroma_store() -> ResourceDescriptor:
         return ResourceDescriptor(
-            clazz=ChromaVectorStore,
+            clazz=Constant.CHROMA_VECTOR_STORE,
             embedding_model="openai_embedding",
             persist_directory="/path/to/chroma/data",  # For persistent storage
             collection="my_documents",
@@ -338,7 +338,7 @@ ChromaDB supports multiple deployment modes:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=ChromaVectorStore,
+        clazz=Constant.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         collection="my_documents"
         # No connection configuration needed for in-memory mode
@@ -351,7 +351,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=ChromaVectorStore,
+        clazz=Constant.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         persist_directory="/path/to/chroma/data",
         collection="my_documents"
@@ -364,7 +364,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=ChromaVectorStore,
+        clazz=Constant.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         host="your-chroma-server.com",
         port=8000,
@@ -378,7 +378,7 @@ def chroma_store() -> ResourceDescriptor:
 @staticmethod
 def chroma_store() -> ResourceDescriptor:
     return ResourceDescriptor(
-        clazz=ChromaVectorStore,
+        clazz=Constant.CHROMA_VECTOR_STORE,
         embedding_model="your_embedding_model",
         api_key="your-chroma-cloud-api-key",
         collection="my_documents"
@@ -428,7 +428,7 @@ Here's how to define an Elasticsearch vector store in your Java agent:
 ```java
 @VectorStore
 public static ResourceDescriptor vectorStore() {
-    return ResourceDescriptor.Builder.newBuilder(ElasticsearchVectorStore.class.getName())
+    return ResourceDescriptor.Builder.newBuilder(Constant.ELASTICSEARCH_VECTOR_STORE)
             .addInitialArgument("embedding_model", "embeddingModel")
             .addInitialArgument("host", "http://localhost:9200")
             .addInitialArgument("index", "my_documents")
diff --git a/docs/content/docs/development/workflow_agent.md b/docs/content/docs/development/workflow_agent.md
index 76431d0b..29e1c097 100644
--- a/docs/content/docs/development/workflow_agent.md
+++ b/docs/content/docs/development/workflow_agent.md
@@ -73,7 +73,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
@@ -139,7 +139,7 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewAnalysisModel() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("prompt", "reviewAnalysisPrompt")
diff --git a/docs/content/docs/get-started/quickstart/react_agent.md b/docs/content/docs/get-started/quickstart/react_agent.md
index b58a5860..e3fa22e1 100644
--- a/docs/content/docs/get-started/quickstart/react_agent.md
+++ b/docs/content/docs/get-started/quickstart/react_agent.md
@@ -47,10 +47,11 @@ agents_env = AgentsExecutionEnvironment.get_execution_environment(env)
 # Add Ollama chat model connection and notify shipping manager tool to be used
 # by the Agent.
 agents_env.add_resource(
-    "ollama_server",
-    ResourceDescriptor(clazz=OllamaChatModelConnection, request_timeout=120),
+  "ollama_server",
+  ResourceType.CHAT_MODEL_CONNECTION,
+  ResourceDescriptor(clazz=Constant.OLLAMA_CHAT_MODEL_CONNECTION, request_timeout=120),
 ).add_resource(
-    "notify_shipping_manager", Tool.from_callable(notify_shipping_manager)
+  "notify_shipping_manager", ResourceType.TOOL, 
Tool.from_callable(notify_shipping_manager)
 )
 ```
 {{< /tab >}}
@@ -88,14 +89,14 @@ Create the ReAct Agent instance, configure the chat model, prompt and the output
 {{< tab "Python" >}}
 ```python
 review_analysis_react_agent = ReActAgent(
-    chat_model=ResourceDescriptor(
-        clazz=OllamaChatModelSetup,
-        connection="ollama_server",
-        model="qwen3:8b",
-        tools=["notify_shipping_manager"],
-    ),
-    prompt=review_analysis_react_prompt,
-    output_schema=ProductReviewAnalysisRes,
+  chat_model=ResourceDescriptor(
+      clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
+      connection="ollama_server",
+      model="qwen3:8b",
+      tools=["notify_shipping_manager"],
+  ),
+  prompt=review_analysis_react_prompt,
+  output_schema=ProductReviewAnalysisRes,
 )
 ```
 {{< /tab >}}
@@ -105,17 +106,17 @@ review_analysis_react_agent = ReActAgent(
 // Create ReAct agent.
 ReActAgent reviewAnalysisReactAgent = getReActAgent();
 
-private static ReActAgent getReActAgent() {
-    return new ReActAgent(
-            ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
-                    .addInitialArgument("connection", "ollamaChatModelConnection")
-                    .addInitialArgument("model", "qwen3:8b")
-                    .addInitialArgument(
-                            "tools", Collections.singletonList("notifyShippingManager"))
-                    .build(),
-            reviewAnalysisReactPrompt(),
-            CustomTypesAndResources.ProductReviewAnalysisRes.class);
-}
+ private static ReActAgent getReActAgent() {
+     return new ReActAgent(
+             ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
+                     .addInitialArgument("connection", "ollamaChatModelConnection")
+                     .addInitialArgument("model", "qwen3:8b")
+                     .addInitialArgument(
+                             "tools", Collections.singletonList("notifyShippingManager"))
+                     .build(),
+             reviewAnalysisReactPrompt(),
+             CustomTypesAndResources.ProductReviewAnalysisRes.class);
+ }
 ```
 {{< /tab >}}
 
diff --git a/docs/content/docs/get-started/quickstart/workflow_agent.md b/docs/content/docs/get-started/quickstart/workflow_agent.md
index f7c50005..43c3e3fc 100644
--- a/docs/content/docs/get-started/quickstart/workflow_agent.md
+++ b/docs/content/docs/get-started/quickstart/workflow_agent.md
@@ -119,7 +119,7 @@ class ReviewAnalysisAgent(Agent):
     def review_analysis_model() -> ResourceDescriptor:
         """ChatModel which focus on review analysis."""
         return ResourceDescriptor(
-            clazz=OllamaChatModelSetup,
+            clazz=Constant.OLLAMA_CHAT_MODEL_SETUP,
             connection="ollama_server",
             model="qwen3:8b",
             prompt="review_analysis_prompt",
@@ -185,12 +185,12 @@ public class ReviewAnalysisAgent extends Agent {
 
     @ChatModelSetup
     public static ResourceDescriptor reviewAnalysisModel() {
-        return ResourceDescriptor.Builder.newBuilder(OllamaChatModelSetup.class.getName())
+        return ResourceDescriptor.Builder.newBuilder(Constant.OLLAMA_CHAT_MODEL_SETUP)
                 .addInitialArgument("connection", "ollamaChatModelConnection")
                 .addInitialArgument("model", "qwen3:8b")
                 .addInitialArgument("prompt", "reviewAnalysisPrompt")
                 .addInitialArgument("tools", 
Collections.singletonList("notifyShippingManager"))
-                .addInitialArgument("extract_reasoning", "true")
+                .addInitialArgument("extract_reasoning", true)
                 .build();
     }
 
