This is an automated email from the ASF dual-hosted git repository.

alexstocks pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/dubbo-go-samples.git


The following commit(s) were added to refs/heads/main by this push:
     new 7cd39767 Change the default model of the LLM example; implemented: 
frontend can display the uploaded image. (#814)
7cd39767 is described below

commit 7cd397674894169603eaaca0315cbfd759c24a06
Author: Alan <[email protected]>
AuthorDate: Sat Mar 22 20:15:51 2025 +0800

    Change the default model of the LLM example; implemented: frontend can 
display the uploaded image. (#814)
    
    * change default model; implement display uploaded pic; rewrite overtime 
mechanism
    
    * add go mod
    
    * add license
    
    * crlf->lf
    
    * crlf -> lf
    
    * crlf -> lf
    
    * crlf -> lf
    
    * crlf -> lf
    
    * crlf -> lf
    
    * spaces
    
    ---------
    
    Co-authored-by: Xuetao Li <[email protected]>
---
 go.mod                                      |   1 +
 llm/.env                                    |  20 ++++++
 llm/README.md                               |  10 ++-
 llm/README_zh.md                            |  12 +++-
 llm/go-client/frontend/handlers/chat.go     |  10 ++-
 llm/go-client/frontend/main.go              |  30 ++++++++-
 llm/go-client/frontend/static/file.png      | Bin 7952 -> 0 bytes
 llm/go-client/frontend/static/script.js     |  99 ++++++++++++++++++++--------
 llm/go-client/frontend/static/style.css     |  12 ++++
 llm/go-client/frontend/templates/index.html |  47 +++++++------
 llm/go-server/cmd/server.go                 |  19 +++++-
 11 files changed, 204 insertions(+), 56 deletions(-)

diff --git a/go.mod b/go.mod
index 21a91546..e24d4287 100644
--- a/go.mod
+++ b/go.mod
@@ -14,6 +14,7 @@ require (
        github.com/gin-gonic/gin v1.10.0
        github.com/gogo/protobuf v1.3.2
        github.com/golang/protobuf v1.5.4
+       github.com/joho/godotenv v1.3.0
        github.com/nacos-group/nacos-sdk-go/v2 v2.2.2
        github.com/opentracing/opentracing-go v1.2.0
        github.com/openzipkin-contrib/zipkin-go-opentracing v0.4.5
diff --git a/llm/.env b/llm/.env
new file mode 100644
index 00000000..4ab48487
--- /dev/null
+++ b/llm/.env
@@ -0,0 +1,20 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements.  See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
+OLLAMA_MODEL = llava:7b
+TIME_OUT_SECOND = 300
\ No newline at end of file
diff --git a/llm/README.md b/llm/README.md
index a7472907..09085344 100644
--- a/llm/README.md
+++ b/llm/README.md
@@ -31,9 +31,13 @@ $ ollama serve
 ### **Download Model**
 
 ```shell
-$ ollama run deepseek-r1:1.5b
+$ ollama pull llava:7b
 ```
 
+The default model is ```llava:7b```, a novel end-to-end trained large 
multimodal model.
+
+You can pull your favourite model and specify which model the demo uses in 
the ```.env``` file.
+
 ## 3. **Run the Example**
 
 You need to run all the commands in ```llm``` directory.
@@ -71,3 +75,7 @@ $ go run go-client/frontend/main.go
 
 Frontend client supports multi-turn conversations, binary file (image) support 
for LLM interactions.
 Currently the supported uploaded image types are limited to png, jpeg and gif, 
with plans to support more binary file types in the future.
+
+### **Notice**
+
+The default timeout is 300 seconds (five minutes). Please make sure that your 
computer's performance allows it to generate the corresponding response within 
that time; otherwise a timeout error will be reported. You can set your own 
timeout in the ```.env``` file
\ No newline at end of file
diff --git a/llm/README_zh.md b/llm/README_zh.md
index 3000c1fc..4236d5a1 100644
--- a/llm/README_zh.md
+++ b/llm/README_zh.md
@@ -31,9 +31,13 @@ $ ollama serve
 ### 下载模型
 
 ```shell
-$ ollama run deepseek-r1:1.5b
+$ ollama pull llava:7b
 ```
 
+默认模型使用```llava:7b```,这是一个新颖的端到端多模态的大模型。
+
+您可以自行pull自己喜欢的模型,并在 ```.env``` 文件中指定该demo使用模型。
+
 ## **3. 运行示例**
 
 以下所有的命令都需要在```llm``` 目录下运行.
@@ -70,4 +74,8 @@ $ go run go-client/frontend/main.go
 ```
 
 包含前端页面的客户端支持多轮对话,支持进行二进制文件(图片)传输并与大模型进行交互。
-目前所支持上传的图片类型被限制为 png,jpeg 和 gif 类型,计划在将来支持更多的二进制文件类型。
\ No newline at end of file
+目前所支持上传的图片类型被限制为 png,jpeg 和 gif 类型,计划在将来支持更多的二进制文件类型。
+
+### **注意事项**
+
+默认超时时间为 300 秒(五分钟),请确保您的电脑性能能在该时间内生成相应的响应,否则会超时报错,您也可以在 ```.env``` 文件中自行设置超时时间。
\ No newline at end of file
diff --git a/llm/go-client/frontend/handlers/chat.go 
b/llm/go-client/frontend/handlers/chat.go
index e16197e2..21d38fd5 100644
--- a/llm/go-client/frontend/handlers/chat.go
+++ b/llm/go-client/frontend/handlers/chat.go
@@ -22,8 +22,10 @@ import (
        "io"
        "log"
        "net/http"
+       "os"
        "regexp"
        "runtime/debug"
+       "strconv"
        "time"
 )
 
@@ -149,6 +151,10 @@ func (h *ChatHandler) Chat(c *gin.Context) {
        }()
 
        // SSE stream output
+       timeout, err := strconv.Atoi(os.Getenv("TIME_OUT_SECOND"))
+       if err != nil {
+               timeout = 300
+       }
        c.Stream(func(w io.Writer) bool {
                select {
                case chunk, ok := <-responseCh:
@@ -157,8 +163,8 @@ func (h *ChatHandler) Chat(c *gin.Context) {
                        }
                        c.SSEvent("message", gin.H{"content": chunk})
                        return true
-               case <-time.After(30 * time.Second):
-                       log.Println("Stream timed out")
+               case <-time.After(time.Duration(timeout) * time.Second):
+                       log.Println("Stream time out")
                        return false
                case <-c.Request.Context().Done():
                        log.Println("Client disconnected")
diff --git a/llm/go-client/frontend/main.go b/llm/go-client/frontend/main.go
index 259d97c0..60146ff2 100644
--- a/llm/go-client/frontend/main.go
+++ b/llm/go-client/frontend/main.go
@@ -19,6 +19,8 @@ package main
 
 import (
        "fmt"
+       "net/http"
+       "os"
 )
 
 import (
@@ -29,6 +31,8 @@ import (
        "github.com/gin-contrib/sessions/cookie"
 
        "github.com/gin-gonic/gin"
+
+       "github.com/joho/godotenv"
 )
 
 import (
@@ -38,6 +42,25 @@ import (
 )
 
 func main() {
+       err := godotenv.Load(".env")
+       if err != nil {
+               panic(fmt.Sprintf("Error loading .env file: %v", err))
+       }
+
+       _, exist := os.LookupEnv("TIME_OUT_SECOND")
+
+       if !exist {
+               fmt.Println("TIME_OUT_SECOND is not set")
+               return
+       }
+
+       _, exist = os.LookupEnv("OLLAMA_MODEL")
+
+       if !exist {
+               fmt.Println("OLLAMA_MODEL is not set")
+               return
+       }
+
        // init Dubbo
        cli, err := client.NewClient(
                client.WithClientURL("tri://127.0.0.1:20000"),
@@ -67,7 +90,12 @@ func main() {
 
        // register route
        h := handlers.NewChatHandler(svc, ctxManager)
-       r.GET("/", h.Index)
+       r.GET("/", func(c *gin.Context) {
+               c.HTML(http.StatusOK, "index.html", gin.H{
+                       "TimeoutSecond": os.Getenv("TIME_OUT_SECOND"),
+                       "OllamaModel":   os.Getenv("OLLAMA_MODEL"),
+               })
+       })
        r.POST("/api/chat", h.Chat)
        r.POST("/api/context/new", h.NewContext)
        r.GET("/api/context/list", h.ListContexts)
diff --git a/llm/go-client/frontend/static/file.png 
b/llm/go-client/frontend/static/file.png
deleted file mode 100644
index 04250526..00000000
Binary files a/llm/go-client/frontend/static/file.png and /dev/null differ
diff --git a/llm/go-client/frontend/static/script.js 
b/llm/go-client/frontend/static/script.js
index 3b56d2cc..8e8d51fa 100644
--- a/llm/go-client/frontend/static/script.js
+++ b/llm/go-client/frontend/static/script.js
@@ -11,51 +11,81 @@ const inputInitHeight = chatInput.scrollHeight;
 let fileBlobArr = [];
 let fileArr = [];
 
-const createChatLi = (message, className) => {
-    // Create a chat <li> element with passed message and className
+const createChatLi = (content, className) => {
     const chatLi = document.createElement("li");
     chatLi.classList.add("chat", `${className}`);
-    let chatContent = className === "outgoing" ? `<p></p>` : `<span 
class="material-symbols-outlined">smart_toy</span><p></p>`;
-    chatLi.innerHTML = chatContent;
-    chatLi.querySelector("p").textContent = message;
-    return chatLi; // return chat <li> element
-}
 
-const handleChat = () => {
-    userMessage = chatInput.value.trim(); // Get user entered message and 
remove extra whitespace
-    userBin = null
-    if (fileBlobArr.length > 0) {
-        userBin = fileBlobArr[0]
+    if (!(className === "outgoing")) {
+        let toy = document.createElement('span');
+        toy.className = "material-symbols-outlined";
+        toy.innerText = "smart_toy"
+        chatLi.appendChild(toy);
     }
-    if(!userMessage && !userBin) return;
 
-    // Clear the input textarea and set its height to default
+    const contents = Array.isArray(content) ? content : [content];
+
+    contents.forEach(item => {
+        if (!item) return;
+        if (item.startsWith('data:image')) {
+            const img = document.createElement('img');
+            img.src = item;
+            chatLi.appendChild(img);
+        } else {
+            const p = document.createElement('p');
+            p.textContent = item;
+            chatLi.appendChild(p);
+        }
+    });
+
+    return chatLi;
+};
+
+const handleChat = () => {
+    userMessage = chatInput.value.trim();
+    userBin = fileBlobArr.length > 0 ? fileBlobArr[0] : null;
+
+    const contents = [];
+    if (userMessage) contents.push(userMessage);
+    if (userBin) contents.push(userBin);
+
+    if (contents.length === 0) return;
+
     chatInput.value = "";
     chatInput.style.height = `${inputInitHeight}px`;
+    clear();
+
+    // user's message
+    chatbox.appendChild(createChatLi(contents, "outgoing"));
+    chatbox.scrollTo(0, chatbox.scrollHeight);
 
-    // Append the user's message to the chatbox
-    chatbox.appendChild(createChatLi(userMessage, "outgoing"));
+    // "Thinking..."
+    const incomingChatLi = createChatLi("Thinking...", "incoming");
+    chatbox.appendChild(incomingChatLi);
     chatbox.scrollTo(0, chatbox.scrollHeight);
 
-    setTimeout(() => {
-        // Display "Thinking..." message while waiting for the response
-        const incomingChatLi = createChatLi("Thinking...", "incoming");
-        chatbox.appendChild(incomingChatLi);
+    // timeout
+    const TIMEOUT_MS = CONFIG.TIME_OUT_SECOND;
+    let isTimeout = false;
+    const timeoutId = setTimeout(() => {
+        isTimeout = true;
+        incomingChatLi.querySelector(".content").textContent = "Request timed 
out. Please try again.";
         chatbox.scrollTo(0, chatbox.scrollHeight);
-        generateResponse(incomingChatLi);
-    }, 600);
+    }, TIMEOUT_MS);
 
-    clear()
+    // send request
+    generateResponse(incomingChatLi, () => {
+        if (!isTimeout) clearTimeout(timeoutId);
+    });
 }
 
-const generateResponse = (chatElement) => {
+const generateResponse = (chatElement, callback) => {
     const API_URL = "/api/chat";
     const messageElement = chatElement.querySelector("p");
 
-    // init stream
+    // Initialize stream
     let accumulatedResponse = "";
     messageElement.textContent = "";
-    messageElement.id = "content"
+    messageElement.id = "content";
 
     fetch(API_URL, {
         method: "POST",
@@ -68,21 +98,26 @@ const generateResponse = (chatElement) => {
             const reader = response.body.getReader();
             const decoder = new TextDecoder();
 
+            // Function to read the stream recursively
             function readStream() {
                 return reader.read().then(({ done, value }) => {
                     if (done) {
+                        // Stream is complete, invoke the callback
+                        if (callback) callback();
                         return;
                     }
 
+                    // Decode the chunk and process events
                     const chunk = decoder.decode(value);
                     const events = chunk.split('\n\n');
 
                     events.forEach(event => {
                         if (event.startsWith('event:message')) {
-                            // extract data
+                            // Extract data from the event
                             const dataLine = event.split('\n')[1];
                             if (dataLine && dataLine.startsWith('data:')) {
                                 try {
+                                    // Parse the JSON data and update the UI
                                     const data = 
JSON.parse(dataLine.replace('data:', ''));
                                     accumulatedResponse += data.content;
                                     messageElement.textContent = 
accumulatedResponse;
@@ -94,18 +129,23 @@ const generateResponse = (chatElement) => {
                         }
                     });
 
+                    // Continue reading the stream
                     return readStream();
                 });
             }
 
+            // Start reading the stream
             return readStream();
         })
         .catch(error => {
             console.error('Error:', error);
             messageElement.classList.add("error");
             messageElement.textContent = "Oops! Something went wrong. Please 
try again.";
+
+            // Invoke the callback in case of error
+            if (callback) callback();
         });
-}
+};
 
 chatInput.addEventListener("input", () => {
     // Adjust the height of the input textarea based on its content
@@ -142,7 +182,8 @@ function filesToBlob(file) {
         fileName.title = file.name;
 
         let img = document.createElement('img');
-        img.src = "../static/file.png";
+        img.src = e.target.result;
+
 
         fileDiv.appendChild(img);
         fileDiv.appendChild(removeDiv);
diff --git a/llm/go-client/frontend/static/style.css 
b/llm/go-client/frontend/static/style.css
index 8c0a0442..2af632c5 100644
--- a/llm/go-client/frontend/static/style.css
+++ b/llm/go-client/frontend/static/style.css
@@ -74,9 +74,21 @@ header h2 {
   display: flex;
   list-style: none;
 }
+.chatbox .chat img {
+  max-width: 250px;
+  max-height: 100px;
+  height: auto;
+  border-radius: 10px;
+  margin-top: 5px;
+}
+
 .chatbox .outgoing {
   margin: 20px 0;
   justify-content: flex-end;
+  display: flex;
+  list-style: none;
+  align-items: flex-end;
+  flex-direction: column;
 }
 .chatbox .incoming span {
   width: 32px;
diff --git a/llm/go-client/frontend/templates/index.html 
b/llm/go-client/frontend/templates/index.html
index 35aef33a..3d82a98b 100644
--- a/llm/go-client/frontend/templates/index.html
+++ b/llm/go-client/frontend/templates/index.html
@@ -16,34 +16,41 @@
   </head>
   <body>
 
-    <div class="chatbot">
-      <header>
-        <h2>Chatbot</h2>
-        <span class="close-btn material-symbols-outlined">close</span>
-      </header>
-      <ul class="chatbox">
-        <li class="chat incoming">
-          <span class="material-symbols-outlined">smart_toy</span>
-          <p  class="chat incoming content" id="content">Hi there 👋</p>
-        </li>
-      </ul>
-      <div class="chat-input">
+  <div class="chatbot">
+    <header>
+      <h2>{{ .OllamaModel }}</h2>
+      <span class="close-btn material-symbols-outlined">close</span>
+    </header>
+    <ul class="chatbox">
+      <li class="chat incoming">
+        <span class="material-symbols-outlined">smart_toy</span>
+        <p  class="chat incoming content" id="content">Hi there 👋</p>
+      </li>
+    </ul>
+    <div class="chat-input">
 
-        <textarea placeholder="Enter a message..." spellcheck="false" 
required></textarea>
+      <textarea placeholder="Enter a message..." spellcheck="false" 
required></textarea>
 
-        <span id="send-btn" class="material-symbols-rounded">send</span>
+      <span id="send-btn" class="material-symbols-rounded">send</span>
 
-        <div id="drop" class="drop-box"></div>
+      <div id="drop" class="drop-box"></div>
 
-        <span1 id="add-btn" class="material-symbols-rounded">add</span1>
-
-      </div>
+      <span1 id="add-btn" class="material-symbols-rounded">add</span1>
 
     </div>
 
-    <input id="input" accept="image/png, image/jpeg, image/gif" type="file" 
style="display: none" >
+  </div>
+
+  <input id="input" accept="image/png, image/jpeg, image/gif" type="file" 
style="display: none" >
+
+</body>
+
+<script>
+  const CONFIG = {
+    TIME_OUT_SECOND: {{ .TimeoutSecond }} * 1000
+  };
 
-  </body>
+</script>
 
 
 </html>
\ No newline at end of file
diff --git a/llm/go-server/cmd/server.go b/llm/go-server/cmd/server.go
index 8f2b88ec..909388a2 100644
--- a/llm/go-server/cmd/server.go
+++ b/llm/go-server/cmd/server.go
@@ -23,6 +23,7 @@ import (
        "fmt"
        "log"
        "net/http"
+       "os"
        "runtime/debug"
 )
 
@@ -31,6 +32,8 @@ import (
        "dubbo.apache.org/dubbo-go/v3/protocol"
        "dubbo.apache.org/dubbo-go/v3/server"
 
+       "github.com/joho/godotenv"
+
        "github.com/tmc/langchaingo/llms"
        "github.com/tmc/langchaingo/llms/ollama"
 )
@@ -44,7 +47,7 @@ type ChatServer struct {
 }
 
 func NewChatServer() (*ChatServer, error) {
-       llm, err := ollama.New(ollama.WithModel("deepseek-r1:1.5b"))
+       llm, err := ollama.New(ollama.WithModel(os.Getenv("OLLAMA_MODEL")))
        if err != nil {
                return nil, err
        }
@@ -115,6 +118,20 @@ func (s *ChatServer) Chat(ctx context.Context, req 
*chat.ChatRequest, stream cha
 }
 
 func main() {
+
+       err := godotenv.Load(".env")
+       if err != nil {
+               fmt.Printf("Error loading .env file: %v\n", err)
+               return
+       }
+
+       _, exist := os.LookupEnv("OLLAMA_MODEL")
+
+       if !exist {
+               fmt.Println("OLLAMA_MODEL is not set")
+               return
+       }
+
        srv, err := server.NewServer(
                server.WithServerProtocol(
                        protocol.WithPort(20000),

Reply via email to