This is an automated email from the ASF dual-hosted git repository.

baoyuan pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git


The following commit(s) were added to refs/heads/master by this push:
     new 09f3d215e feat(ai-proxy): add support for pushing logs in ai-proxy plugins (#12515)
09f3d215e is described below

commit 09f3d215eaece685389e34f74854418d72942316
Author: Ashish Tiwari <ashishjaitiwari15112...@gmail.com>
AuthorDate: Wed Aug 27 08:21:21 2025 +0530

    feat(ai-proxy): add support for pushing logs in ai-proxy plugins (#12515)
---
 apisix/plugins/ai-proxy-multi.lua             |   5 +
 apisix/plugins/ai-proxy.lua                   |   5 +
 apisix/plugins/ai-proxy/base.lua              |  21 ++
 apisix/plugins/ai-proxy/schema.lua            |  17 ++
 apisix/utils/log-util.lua                     |   9 +
 t/plugin/{ai-proxy.t => ai-proxy-kafka-log.t} | 414 ++++++++------------------
 t/plugin/ai-proxy-multi.openai-compatible.t   |   3 +-
 t/plugin/ai-proxy-multi.t                     |   2 +-
 t/plugin/ai-proxy.openai-compatible.t         |   2 +-
 t/plugin/ai-proxy.t                           |   2 +-
 10 files changed, 194 insertions(+), 286 deletions(-)
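
The new "logging" object sits alongside "auth" and "options" in the plugin
configuration, and each flag only marks data for collection; an ordinary
log-pushing plugin on the same route (kafka-logger in the tests below) does
the delivery. A minimal sketch of the relevant conf fragment as a Lua table
(values are placeholders, not taken from this patch):

    -- sketch: ai-proxy plugin conf enabling both new log fields
    local conf = {
        provider = "openai",
        options  = { model = "gpt-4" },
        logging  = {
            summaries = true,  -- model, duration, prompt/completion tokens
            payloads  = true,  -- request messages and response text
        },
    }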

diff --git a/apisix/plugins/ai-proxy-multi.lua b/apisix/plugins/ai-proxy-multi.lua
index 4c2dff582..b162eee96 100644
--- a/apisix/plugins/ai-proxy-multi.lua
+++ b/apisix/plugins/ai-proxy-multi.lua
@@ -357,5 +357,10 @@ end
 
 _M.before_proxy = base.before_proxy
 
+function _M.log(conf, ctx)
+    if conf.logging then
+        base.set_logging(ctx, conf.logging.summaries, conf.logging.payloads)
+    end
+end
 
 return _M
diff --git a/apisix/plugins/ai-proxy.lua b/apisix/plugins/ai-proxy.lua
index d8a5ac17e..092eb6a08 100644
--- a/apisix/plugins/ai-proxy.lua
+++ b/apisix/plugins/ai-proxy.lua
@@ -54,5 +54,10 @@ end
 
 _M.before_proxy = base.before_proxy
 
+function _M.log(conf, ctx)
+    if conf.logging then
+        base.set_logging(ctx, conf.logging.summaries, conf.logging.payloads)
+    end
+end
 
 return _M
diff --git a/apisix/plugins/ai-proxy/base.lua b/apisix/plugins/ai-proxy/base.lua
index 4f1102885..0c188f1e4 100644
--- a/apisix/plugins/ai-proxy/base.lua
+++ b/apisix/plugins/ai-proxy/base.lua
@@ -24,6 +24,27 @@ local bad_request = ngx.HTTP_BAD_REQUEST
 
 local _M = {}
 
+function _M.set_logging(ctx, summaries, payloads)
+    if summaries then
+        ctx.llm_summary = {
+            model = ctx.var.llm_model,
+            duration = ctx.var.llm_time_to_first_token,
+            prompt_tokens = ctx.var.llm_prompt_tokens,
+            completion_tokens = ctx.var.llm_completion_tokens,
+        }
+    end
+    if payloads then
+        ctx.llm_request = {
+            messages = ctx.var.llm_request_body and ctx.var.llm_request_body.messages,
+            stream = ctx.var.request_type == "ai_stream"
+        }
+        ctx.llm_response_text = {
+            content = ctx.var.llm_response_text
+        }
+    end
+end
+
+
 function _M.before_proxy(conf, ctx)
     local ai_instance = ctx.picked_ai_instance
     local ai_driver = require("apisix.plugins.ai-drivers." .. ai_instance.provider)
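
set_logging only stashes values from ctx.var onto the request context;
nothing is written at this point. A sketch of what the context carries after
a call with both flags enabled (keys come from the function above, values
are invented, taken loosely from the test fixtures):

    -- shape of ctx after base.set_logging(ctx, true, true);
    -- duration mirrors the llm_time_to_first_token variable
    ctx.llm_summary = {
        model = "gpt-35-turbo-instruct",
        duration = 0.2,
        prompt_tokens = 15,
        completion_tokens = 20,
    }
    ctx.llm_request = {
        messages = { { role = "user", content = "What is 1+1?" } },
        stream = false,
    }
    ctx.llm_response_text = { content = "1 + 1 = 2." }
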
diff --git a/apisix/plugins/ai-proxy/schema.lua b/apisix/plugins/ai-proxy/schema.lua
index 3510dca69..c6b674ace 100644
--- a/apisix/plugins/ai-proxy/schema.lua
+++ b/apisix/plugins/ai-proxy/schema.lua
@@ -102,6 +102,21 @@ local ai_instance_schema = {
     },
 }
 
+local logging_schema = {
+    type = "object",
+    properties = {
+        summaries = {
+            type = "boolean",
+            default = false,
+            description = "Record the LLM model, request duration, and request/response token counts"
+        },
+        payloads = {
+            type = "boolean",
+            default = false,
+            description = "Record user request and response payload"
+        }
+    }
+}
 
 _M.ai_proxy_schema = {
     type = "object",
@@ -117,6 +132,7 @@ _M.ai_proxy_schema = {
             }, -- add more providers later
 
         },
+        logging = logging_schema,
         auth = auth_schema,
         options = model_options_schema,
         timeout = {
@@ -176,6 +192,7 @@ _M.ai_proxy_multi_schema = {
             default = { algorithm = "roundrobin" }
         },
         instances = ai_instance_schema,
+        logging = logging_schema,
         fallback_strategy = {
             type = "string",
             enum = { "instance_health_and_rate_limiting" },
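
Both flags default to false, so existing routes keep their current log
output and the new fields are strictly opt-in. A quick validation sketch in
the style of the check_schema tests the copied test file drops below (the
conf table is illustrative):

    local plugin = require("apisix.plugins.ai-proxy")
    local ok, err = plugin.check_schema({
        provider = "openai",
        options  = { model = "gpt-4" },
        auth     = { header = { some_header = "some_value" } },
        logging  = { summaries = true },  -- payloads stays at its default, false
    })
    assert(ok, err)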
diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua
index c9cda1d6a..17a2e0e6d 100644
--- a/apisix/utils/log-util.lua
+++ b/apisix/utils/log-util.lua
@@ -290,6 +290,15 @@ function _M.get_log_entry(plugin_name, conf, ctx)
         end
     end
 
+    if ctx.llm_summary then
+        entry.llm_summary = ctx.llm_summary
+    end
+    if ctx.llm_request then
+        entry.llm_request = ctx.llm_request
+    end
+    if ctx.llm_response_text then
+        entry.llm_response_text = ctx.llm_response_text
+    end
     return entry, customized
 end
 
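With those context fields set, get_log_entry appends them to the entry that
every log-util based logger ships, so any such plugin (kafka-logger,
http-logger, ...) picks the LLM data up without changes of its own. A rough
usage sketch, assuming a logger's log phase with conf and ctx as APISIX
passes them:

    -- sketch: how a logger plugin ends up carrying the new fields
    local log_util = require("apisix.utils.log-util")

    local entry = log_util.get_log_entry("kafka-logger", conf, ctx)
    -- entry.llm_summary, entry.llm_request and entry.llm_response_text
    -- are present whenever the ai-proxy route enabled them
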
diff --git a/t/plugin/ai-proxy.t b/t/plugin/ai-proxy-kafka-log.t
similarity index 62%
copy from t/plugin/ai-proxy.t
copy to t/plugin/ai-proxy-kafka-log.t
index c99a6c11e..aa039987c 100644
--- a/t/plugin/ai-proxy.t
+++ b/t/plugin/ai-proxy-kafka-log.t
@@ -181,65 +181,7 @@ run_tests();
 
 __DATA__
 
-=== TEST 1: minimal viable configuration
---- config
-    location /t {
-        content_by_lua_block {
-            local plugin = require("apisix.plugins.ai-proxy")
-            local ok, err = plugin.check_schema({
-                provider = "openai",
-                options = {
-                    model = "gpt-4",
-                },
-                auth = {
-                    header = {
-                        some_header = "some_value"
-                    }
-                }
-            })
-
-            if not ok then
-                ngx.say(err)
-            else
-                ngx.say("passed")
-            end
-        }
-    }
---- response_body
-passed
-
-
-
-=== TEST 2: unsupported provider
---- config
-    location /t {
-        content_by_lua_block {
-            local plugin = require("apisix.plugins.ai-proxy")
-            local ok, err = plugin.check_schema({
-                provider = "some-unique",
-                options = {
-                    model = "gpt-4",
-                },
-                auth = {
-                    header = {
-                        some_header = "some_value"
-                    }
-                }
-            })
-
-            if not ok then
-                ngx.say(err)
-            else
-                ngx.say("passed")
-            end
-        }
-    }
---- response_body eval
-qr/.*property "provider" validation failed: matches none of the enum values.*/
-
-
-
-=== TEST 3: set route with wrong auth header
+=== TEST 1: set route with logging summaries and payloads
 --- config
     location /t {
         content_by_lua_block {
@@ -253,7 +195,7 @@ qr/.*property "provider" validation failed: matches none of the enum values.*/
                             "provider": "openai",
                             "auth": {
                                 "header": {
-                                    "Authorization": "Bearer wrongtoken"
+                                    "Authorization": "Bearer token"
                                 }
                             },
                             "options": {
@@ -264,8 +206,22 @@ qr/.*property "provider" validation failed: matches none of the enum values.*/
                             "override": {
                                 "endpoint": "http://localhost:6724";
                             },
-                            "ssl_verify": false
-                        }
+                            "ssl_verify": false,
+                            "logging": {
+                                "summaries": true,
+                                "payloads": true
+                            }
+                        },
+                            "kafka-logger": {
+                                "broker_list" :
+                                  {
+                                    "127.0.0.1":9092
+                                  },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
                     }
                 }]]
             )
@@ -281,17 +237,23 @@ passed
 
 
 
-=== TEST 4: send request
+=== TEST 2: send request
 --- request
 POST /anything
 { "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
---- error_code: 401
---- response_body
-Unauthorized
+--- more_headers
+Authorization: Bearer token
+--- error_log
+send data to kafka:
+llm_request
+llm_summary
+You are a mathematician
+gpt-35-turbo-instruct
+llm_response_text
 
 
 
-=== TEST 5: set route with right auth header
+=== TEST 3: set route with logging summary but no payload
 --- config
     location /t {
         content_by_lua_block {
@@ -316,8 +278,22 @@ Unauthorized
                             "override": {
                                 "endpoint": "http://localhost:6724";
                             },
-                            "ssl_verify": false
-                        }
+                            "ssl_verify": false,
+                            "logging": {
+                                "summaries": true,
+                                "payloads": false
+                            }
+                        },
+                            "kafka-logger": {
+                                "broker_list" :
+                                  {
+                                    "127.0.0.1":9092
+                                  },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
                     }
                 }]]
             )
@@ -333,64 +309,23 @@ passed
 
 
 
-=== TEST 6: send request
---- request
-POST /anything
-{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
---- more_headers
-Authorization: Bearer token
---- error_code: 200
---- response_body eval
-qr/\{ "content": "1 \+ 1 = 2\.", "role": "assistant" \}/
-
-
-
-=== TEST 7: send request with empty body
+=== TEST 4: send request
 --- request
 POST /anything
---- more_headers
-Authorization: Bearer token
---- error_code: 400
---- response_body_chomp
-failed to get request body: request body is empty
-
-
-
-=== TEST 8: send request with wrong method (GET) should work
---- request
-GET /anything
 { "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
 --- more_headers
 Authorization: Bearer token
---- error_code: 200
---- response_body eval
-qr/\{ "content": "1 \+ 1 = 2\.", "role": "assistant" \}/
+--- error_log
+send data to kafka:
+llm_summary
+gpt-35-turbo-instruct
+--- no_error_log
+llm_request
+llm_response_text
 
 
 
-=== TEST 9: wrong JSON in request body should give error
---- request
-GET /anything
-{}"messages": [ { "role": "system", "cont
---- error_code: 400
---- response_body
-{"message":"could not get parse JSON request body: Expected the end but found 
T_STRING at character 3"}
-
-
-
-=== TEST 10: content-type should be JSON
---- request
-POST /anything
-prompt%3Dwhat%2520is%25201%2520%252B%25201
---- more_headers
-Content-Type: application/x-www-form-urlencoded
---- error_code: 400
---- response_body chomp
-unsupported content-type: application/x-www-form-urlencoded, only application/json is supported
-
-
-
-=== TEST 11: model options being merged to request body
+=== TEST 5: set route with logging summaries and payloads disabled (default behaviour)
 --- config
     location /t {
         content_by_lua_block {
@@ -408,114 +343,58 @@ unsupported content-type: application/x-www-form-urlencoded, only application/js
                                 }
                             },
                             "options": {
-                                "model": "some-model",
-                                "foo": "bar",
+                                "model": "gpt-35-turbo-instruct",
+                                "max_tokens": 512,
                                 "temperature": 1.0
                             },
                             "override": {
                                 "endpoint": "http://localhost:6724";
                             },
-                            "ssl_verify": false
-                        }
+                            "ssl_verify": false,
+                            "logging": {
+                                "summaries": false,
+                                "payloads": false
+                            }
+                        },
+                            "kafka-logger": {
+                                "broker_list" :
+                                  {
+                                    "127.0.0.1":9092
+                                  },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
                     }
                 }]]
             )
 
             if code >= 300 then
                 ngx.status = code
-                ngx.say(body)
-                return
             end
-
-            local code, body, actual_body = t("/anything",
-                ngx.HTTP_POST,
-                [[{
-                    "messages": [
-                        { "role": "system", "content": "You are a mathematician" },
-                        { "role": "user", "content": "What is 1+1?" }
-                    ]
-                }]],
-                nil,
-                {
-                    ["test-type"] = "options",
-                    ["Content-Type"] = "application/json",
-                }
-            )
-
-            ngx.status = code
-            ngx.say(actual_body)
-
+            ngx.say(body)
         }
     }
---- error_code: 200
---- response_body_chomp
-options_works
-
-
-
-=== TEST 12: override path
---- config
-    location /t {
-        content_by_lua_block {
-            local t = require("lib.test_admin").test
-            local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/anything",
-                    "plugins": {
-                        "ai-proxy": {
-                            "provider": "openai",
-                            "model": "some-model",
-                            "auth": {
-                                "header": {
-                                    "Authorization": "Bearer token"
-                                }
-                            },
-                            "options": {
-                                "foo": "bar",
-                                "temperature": 1.0
-                            },
-                            "override": {
-                                "endpoint": "http://localhost:6724/random"
-                            },
-                            "ssl_verify": false
-                        }
-                    }
-                }]]
-            )
-
-            if code >= 300 then
-                ngx.status = code
-                ngx.say(body)
-                return
-            end
+--- response_body
+passed
 
-            local code, body, actual_body = t("/anything",
-                ngx.HTTP_POST,
-                [[{
-                    "messages": [
-                        { "role": "system", "content": "You are a mathematician" },
-                        { "role": "user", "content": "What is 1+1?" }
-                    ]
-                }]],
-                nil,
-                {
-                    ["test-type"] = "path",
-                    ["Content-Type"] = "application/json",
-                }
-            )
 
-            ngx.status = code
-            ngx.say(actual_body)
 
-        }
-    }
---- response_body_chomp
-path override works
+=== TEST 6: send request
+--- request
+POST /anything
+{ "messages": [ { "role": "system", "content": "You are a mathematician" }, { 
"role": "user", "content": "What is 1+1?"} ] }
+--- more_headers
+Authorization: Bearer token
+--- no_error_log
+llm_request
+llm_response_text
+llm_summary
 
 
 
-=== TEST 13: set route with stream = true (SSE)
+=== TEST 7: set route with stream = true (SSE) using the ai-proxy-multi plugin
 --- config
     location /t {
         content_by_lua_block {
@@ -525,26 +404,46 @@ path override works
                  [[{
                     "uri": "/anything",
                     "plugins": {
-                        "ai-proxy": {
-                            "provider": "openai",
-                            "auth": {
-                                "header": {
-                                    "Authorization": "Bearer token"
+                        "ai-proxy-multi": {
+                            "instances": [
+                                {
+                                    "name": "self-hosted",
+                                    "provider": "openai-compatible",
+                                    "weight": 1,
+                                    "auth": {
+                                        "header": {
+                                            "Authorization": "Bearer token"
+                                        }
+                                    },
+                                    "options": {
+                                        "model": "custom-instruct",
+                                        "max_tokens": 512,
+                                        "temperature": 1.0,
+                                        "stream": true
+                                    },
+                                    "override": {
+                                        "endpoint": "http://localhost:7737/v1/chat/completions"
+                                    }
                                 }
-                            },
-                            "options": {
-                                "model": "gpt-35-turbo-instruct",
-                                "max_tokens": 512,
-                                "temperature": 1.0,
-                                "stream": true
-                            },
-                            "override": {
-                                "endpoint": "http://localhost:7737"
-                            },
-                            "ssl_verify": false
-                        }
+                            ],
+                            "ssl_verify": false,
+                            "logging": {
+                                "summaries": true,
+                                "payloads": true
+                            }
+                        },
+                            "kafka-logger": {
+                                "broker_list" :
+                                  {
+                                    "127.0.0.1":9092
+                                  },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
                     }
-                }]]
+                 }]]
             )
 
             if code >= 300 then
@@ -558,7 +457,7 @@ passed
 
 
 
-=== TEST 14: test is SSE works as expected
+=== TEST 8: test if SSE works as expected
 --- config
     location /t {
         content_by_lua_block {
@@ -585,6 +484,7 @@ passed
                 },
                 path = "/anything",
                 body = [[{
+                    "stream": true,
                     "messages": [
                         { "role": "system", "content": "some content" }
                     ]
@@ -614,60 +514,10 @@ passed
             ngx.print(#final_res .. final_res[6])
         }
     }
---- response_body_like eval
+--- response_body eval
 qr/6data: \[DONE\]\n\n/
-
-
-
-=== TEST 15: proxy embedding endpoint
---- config
-    location /t {
-        content_by_lua_block {
-            local t = require("lib.test_admin").test
-            local code, body = t('/apisix/admin/routes/1',
-                 ngx.HTTP_PUT,
-                 [[{
-                    "uri": "/embeddings",
-                    "plugins": {
-                        "ai-proxy": {
-                            "provider": "openai",
-                            "auth": {
-                                "header": {
-                                    "Authorization": "Bearer token"
-                                }
-                            },
-                            "options": {
-                                "model": "text-embedding-ada-002",
-                                "encoding_format": "float"
-                            },
-                            "override": {
-                                "endpoint": "http://localhost:6724/v1/embeddings"
-                            }
-                        }
-                    }
-                }]]
-            )
-
-            if code >= 300 then
-                ngx.status = code
-                ngx.say(body)
-                return
-            end
-
-            ngx.say("passed")
-        }
-    }
---- response_body
-passed
-
-
-
-=== TEST 16: send request to embedding api
---- request
-POST /embeddings
-{
-    "input": "The food was delicious and the waiter..."
-}
---- error_code: 200
---- response_body_like eval
-qr/.*text-embedding-ada-002*/
+--- error_log
+send data to kafka:
+llm_request
+llm_summary
+some content
diff --git a/t/plugin/ai-proxy-multi.openai-compatible.t b/t/plugin/ai-proxy-multi.openai-compatible.t
index fe34b4f82..19a123a1d 100644
--- a/t/plugin/ai-proxy-multi.openai-compatible.t
+++ b/t/plugin/ai-proxy-multi.openai-compatible.t
@@ -266,7 +266,8 @@ passed
                 body = [[{
                     "messages": [
                         { "role": "system", "content": "some content" }
-                    ]
+                    ],
+                    "stream": true
                 }]],
             }
 
diff --git a/t/plugin/ai-proxy-multi.t b/t/plugin/ai-proxy-multi.t
index f557f173f..5434f7699 100644
--- a/t/plugin/ai-proxy-multi.t
+++ b/t/plugin/ai-proxy-multi.t
@@ -603,5 +603,5 @@ passed
             ngx.print(#final_res .. final_res[6])
         }
     }
---- response_body_like eval
+--- response_body eval
 qr/6data: \[DONE\]\n\n/
diff --git a/t/plugin/ai-proxy.openai-compatible.t b/t/plugin/ai-proxy.openai-compatible.t
index 9168816fd..efeec6eeb 100644
--- a/t/plugin/ai-proxy.openai-compatible.t
+++ b/t/plugin/ai-proxy.openai-compatible.t
@@ -336,5 +336,5 @@ passed
             ngx.print(#final_res .. final_res[6])
         }
     }
---- response_body_like eval
+--- response_body eval
 qr/6data: \[DONE\]\n\n/
diff --git a/t/plugin/ai-proxy.t b/t/plugin/ai-proxy.t
index c99a6c11e..e8f4e1173 100644
--- a/t/plugin/ai-proxy.t
+++ b/t/plugin/ai-proxy.t
@@ -614,7 +614,7 @@ passed
             ngx.print(#final_res .. final_res[6])
         }
     }
---- response_body_like eval
+--- response_body eval
 qr/6data: \[DONE\]\n\n/
 
 
