This is an automated email from the ASF dual-hosted git repository.

alexstocks pushed a commit to branch main
in repository https://gitbox.apache.org/repos/asf/dubbo-go-samples.git


The following commit(s) were added to refs/heads/main by this push:
     new a640f886 feat:  integrates LLM into Nacos (#825)
a640f886 is described below

commit a640f886e5d1054606dadb83d1e7954875bad501
Author: ZeruiYang <[email protected]>
AuthorDate: Sat Apr 12 05:22:55 2025 -0700

    feat:  integrates LLM into Nacos (#825)
    
    * feat:  integrates LLM into Nacos
    
    * feat:  integrates LLM into Nacos
    
    * fix: warn of fmt.Errorf
---
 llm/.env.example               |  4 +++-
 llm/README.md                  |  4 ++++
 llm/README_zh.md               |  4 ++++
 llm/config/config.go           |  8 ++++++++
 llm/go-client/cmd/client.go    | 15 ++++++++++++---
 llm/go-client/frontend/main.go | 16 +++++++++++++---
 llm/go-server/cmd/server.go    | 32 ++++++++++++++++++++++++--------
 7 files changed, 68 insertions(+), 15 deletions(-)

diff --git a/llm/.env.example b/llm/.env.example
index dbae2ee7..e484c71d 100644
--- a/llm/.env.example
+++ b/llm/.env.example
@@ -18,4 +18,6 @@
 
 OLLAMA_MODELS = llava:7b, qwen2.5:7b
 OLLAMA_URL = http://localhost:11434
-TIME_OUT_SECOND = 300
\ No newline at end of file
+TIME_OUT_SECOND = 300
+
+NACOS_URL = localhost:8848
\ No newline at end of file
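
Note on the new entry: the example value localhost:8848 is a bare host:port address with no scheme, which is the form the registry configuration further down passes straight to registry.WithAddress. A small, purely illustrative sketch (not part of this commit) that fails fast on a malformed value:

```go
// Hypothetical check, not part of this commit: verifies that NACOS_URL
// follows the bare host:port form used by the example value above.
package main

import (
	"fmt"
	"net"
	"os"
)

func main() {
	addr := os.Getenv("NACOS_URL") // e.g. "localhost:8848"
	if _, _, err := net.SplitHostPort(addr); err != nil {
		fmt.Printf("NACOS_URL %q is not a valid host:port address: %v\n", addr, err)
		os.Exit(1)
	}
	fmt.Println("NACOS_URL looks valid:", addr)
}
```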
diff --git a/llm/README.md b/llm/README.md
index 95a27983..ec0f8d35 100644
--- a/llm/README.md
+++ b/llm/README.md
@@ -38,6 +38,10 @@ Default model uses ```llava:7b```, a novel end-to-end trained large multimodal m
 
 You can pull your favourite model and specify the demo to use the model in ```.env``` file
 
+### **Install Nacos**
+
+Follow these instructions to [install and start the Nacos server](https://dubbo-next.staged.apache.org/zh-cn/overview/reference/integrations/nacos/).
+
 ## 3. **Run the Example**
 
 You need to run all the commands in ```llm``` directory.
diff --git a/llm/README_zh.md b/llm/README_zh.md
index c58bee87..25fed96b 100644
--- a/llm/README_zh.md
+++ b/llm/README_zh.md
@@ -38,6 +38,10 @@ $ ollama pull llava:7b
 
 You can pull your favourite model and specify it for this demo in the ```.env``` file.
 
+### **Install Nacos**
+
+Follow these instructions to [install and start Nacos](https://dubbo-next.staged.apache.org/zh-cn/overview/reference/integrations/nacos/).
+
 ## **3. Run the Example**
 
 All of the following commands need to be run in the ```llm``` directory.
diff --git a/llm/config/config.go b/llm/config/config.go
index 046a6544..eeaaf05c 100644
--- a/llm/config/config.go
+++ b/llm/config/config.go
@@ -34,6 +34,7 @@ type Config struct {
        OllamaURL    string
 
        TimeoutSeconds int
+       NacosURL       string
 }
 
 var (
@@ -86,6 +87,13 @@ func Load(envFile string) (*Config, error) {
                        }
                        config.TimeoutSeconds = timeout
                }
+
+               nacosURL := os.Getenv("NACOS_URL")
+               if nacosURL == "" {
+                       configErr = fmt.Errorf("NACOS_URL is not set")
+                       return
+               }
+               config.NacosURL = nacosURL
        })
 
        return config, configErr
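
With the new field in place, callers obtain the registry address through the existing loader. A minimal sketch of a consumer (config.GetConfig is the accessor already used by the server code in this sample; the import path is inferred from the repository layout):

```go
// Minimal consumer of the new NacosURL field. The import path below is
// inferred from the module layout used by the proto import in this sample.
package main

import (
	"fmt"

	"github.com/apache/dubbo-go-samples/llm/config"
)

func main() {
	cfg, err := config.GetConfig()
	if err != nil {
		panic(err) // fails if NACOS_URL (or another required variable) is unset
	}
	fmt.Println("Nacos registry address:", cfg.NacosURL)
}
```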
diff --git a/llm/go-client/cmd/client.go b/llm/go-client/cmd/client.go
index d1474a36..5935b856 100644
--- a/llm/go-client/cmd/client.go
+++ b/llm/go-client/cmd/client.go
@@ -26,8 +26,9 @@ import (
 )
 
 import (
-       "dubbo.apache.org/dubbo-go/v3/client"
+       "dubbo.apache.org/dubbo-go/v3"
        _ "dubbo.apache.org/dubbo-go/v3/imports"
+       "dubbo.apache.org/dubbo-go/v3/registry"
 )
 
 import (
@@ -143,12 +144,20 @@ func main() {
 
        currentCtxID = createContext()
 
-       cli, err := client.NewClient(
-               client.WithClientURL("tri://127.0.0.1:20000"),
+       ins, err := dubbo.NewInstance(
+               dubbo.WithRegistry(
+                       registry.WithNacos(),
+                       registry.WithAddress(cfg.NacosURL),
+               ),
        )
        if err != nil {
                panic(err)
        }
+       // configure the params that only the client layer cares about
+       cli, err := ins.NewClient()
+       if err != nil {
+               panic(err)
+       }
 
        svc, err := chat.NewChatService(cli)
        if err != nil {
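
Putting the client fragments together: the provider is now resolved through the Nacos registry instead of the previous hard-coded tri://127.0.0.1:20000 URL. A condensed, self-contained sketch of the resulting bootstrap (error handling kept minimal; the config and chat import paths mirror the ones used in this sample):

```go
// Condensed view of the updated client bootstrap: a dubbo Instance
// configured with the Nacos registry replaces the hard-coded tri:// URL.
package main

import (
	"dubbo.apache.org/dubbo-go/v3"
	_ "dubbo.apache.org/dubbo-go/v3/imports"
	"dubbo.apache.org/dubbo-go/v3/registry"

	"github.com/apache/dubbo-go-samples/llm/config"
	chat "github.com/apache/dubbo-go-samples/llm/proto"
)

func main() {
	cfg, err := config.GetConfig()
	if err != nil {
		panic(err)
	}

	ins, err := dubbo.NewInstance(
		dubbo.WithRegistry(
			registry.WithNacos(),
			registry.WithAddress(cfg.NacosURL), // e.g. "localhost:8848" from .env
		),
	)
	if err != nil {
		panic(err)
	}

	cli, err := ins.NewClient() // client-layer options would go here
	if err != nil {
		panic(err)
	}

	svc, err := chat.NewChatService(cli) // generated Triple stub for the chat service
	if err != nil {
		panic(err)
	}
	_ = svc // the real client drives an interactive chat loop with svc
}
```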
diff --git a/llm/go-client/frontend/main.go b/llm/go-client/frontend/main.go
index a9b37ec4..31477d58 100644
--- a/llm/go-client/frontend/main.go
+++ b/llm/go-client/frontend/main.go
@@ -23,8 +23,9 @@ import (
 )
 
 import (
-       "dubbo.apache.org/dubbo-go/v3/client"
+       "dubbo.apache.org/dubbo-go/v3"
        _ "dubbo.apache.org/dubbo-go/v3/imports"
+       "dubbo.apache.org/dubbo-go/v3/registry"
 
        "github.com/gin-contrib/sessions"
        "github.com/gin-contrib/sessions/cookie"
@@ -46,9 +47,18 @@ func main() {
        }
 
        // init Dubbo
-       cli, err := client.NewClient(
-               client.WithClientURL("tri://127.0.0.1:20000"),
+       ins, err := dubbo.NewInstance(
+               dubbo.WithRegistry(
+                       registry.WithNacos(),
+                       registry.WithAddress(cfg.NacosURL),
+               ),
        )
+       if err != nil {
+               panic(err)
+       }
+       // configure the params that only the client layer cares about
+       cli, err := ins.NewClient()
+
        if err != nil {
                panic(fmt.Sprintf("Error creating Dubbo client: %v", err))
        }
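
go-client/cmd/client.go and go-client/frontend/main.go now repeat the same instance bootstrap. If that duplication ever becomes a concern, it could be factored into a shared helper; the sketch below is purely illustrative, and the newChatService name and its placement are assumptions, not part of this commit:

```go
// Illustrative only: a shared constructor both client entry points could call.
// Assumes the same imports as client.go (dubbo, registry, config, chat).
func newChatService(cfg *config.Config) (chat.ChatService, error) {
	ins, err := dubbo.NewInstance(
		dubbo.WithRegistry(
			registry.WithNacos(),
			registry.WithAddress(cfg.NacosURL),
		),
	)
	if err != nil {
		return nil, err
	}
	cli, err := ins.NewClient()
	if err != nil {
		return nil, err
	}
	// chat.ChatService is assumed to be the interface returned by the
	// generated chat.NewChatService constructor used in this diff.
	return chat.NewChatService(cli)
}
```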
diff --git a/llm/go-server/cmd/server.go b/llm/go-server/cmd/server.go
index f1eb865b..73b87139 100644
--- a/llm/go-server/cmd/server.go
+++ b/llm/go-server/cmd/server.go
@@ -27,10 +27,10 @@ import (
 )
 
 import (
+       "dubbo.apache.org/dubbo-go/v3"
        _ "dubbo.apache.org/dubbo-go/v3/imports"
        "dubbo.apache.org/dubbo-go/v3/protocol"
-       "dubbo.apache.org/dubbo-go/v3/server"
-
+       "dubbo.apache.org/dubbo-go/v3/registry"
        "github.com/tmc/langchaingo/llms"
        "github.com/tmc/langchaingo/llms/ollama"
 )
@@ -40,15 +40,13 @@ import (
        chat "github.com/apache/dubbo-go-samples/llm/proto"
 )
 
+var cfg *config.Config
+
 type ChatServer struct {
        llms map[string]*ollama.LLM
 }
 
 func NewChatServer() (*ChatServer, error) {
-       cfg, err := config.GetConfig()
-       if err != nil {
-               return nil, fmt.Errorf("Error loading config: %v\n", err)
-       }
 
        llmMap := make(map[string]*ollama.LLM)
 
@@ -161,11 +159,29 @@ func (s *ChatServer) Chat(ctx context.Context, req *chat.ChatRequest, stream cha
 
 func main() {
 
-       srv, err := server.NewServer(
-               server.WithServerProtocol(
+       var err error
+       cfg, err = config.GetConfig()
+       if err != nil {
+               fmt.Printf("Error loading config: %v\n", err)
+               return
+       }
+
+       ins, err := dubbo.NewInstance(
+               dubbo.WithRegistry(
+                       registry.WithNacos(),
+                       registry.WithAddress(cfg.NacosURL),
+               ),
+               dubbo.WithProtocol(
+                       protocol.WithTriple(),
                        protocol.WithPort(20000),
                ),
        )
+
+       if err != nil {
+               panic(err)
+       }
+       srv, err := ins.NewServer()
+
        if err != nil {
                fmt.Printf("Error creating server: %v\n", err)
                return
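
The server diff is cut off by the mail after the ins.NewServer error check. For orientation only, the remainder of main presumably registers the chat handler and starts serving; in the hedged sketch below, RegisterChatServiceHandler follows the usual protoc-gen-triple naming convention and does not appear in the diff itself:

```go
	// Hedged sketch of the tail of main that the mail truncates above.
	// RegisterChatServiceHandler is an assumed generated function name;
	// only NewChatServer and srv come from the diff itself.
	chatServer, err := NewChatServer()
	if err != nil {
		fmt.Printf("Error creating chat server: %v\n", err)
		return
	}
	if err := chat.RegisterChatServiceHandler(srv, chatServer); err != nil {
		fmt.Printf("Error registering chat service: %v\n", err)
		return
	}
	if err := srv.Serve(); err != nil {
		fmt.Printf("Error starting server: %v\n", err)
		return
	}
}
```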
