This is an automated email from the ASF dual-hosted git repository.

spacewander pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git


The following commit(s) were added to refs/heads/master by this push:
     new f718dc8  feat: supports logging apisix_latency and upstream_latency. (#6063)
f718dc8 is described below

commit f718dc8faf76d0fa5a3b5e652dddd17d1c1e3aa4
Author: Jager <[email protected]>
AuthorDate: Tue Jan 18 09:21:10 2022 +0800

    feat: supports logging apisix_latency and upstream_latency. (#6063)
    
    Co-authored-by: Jagerzhang <[email protected]>
---
 apisix/utils/log-util.lua         | 48 ++++++++++++++++++++++-----------------
 t/plugin/http-logger-log-format.t |  2 +-
 2 files changed, 28 insertions(+), 22 deletions(-)

diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua
index 3f268aa..7200c0c 100644
--- a/apisix/utils/log-util.lua
+++ b/apisix/utils/log-util.lua
@@ -73,6 +73,28 @@ local function get_custom_format_log(ctx, format)
 end
 _M.get_custom_format_log = get_custom_format_log
 
+
+local function latency_details_in_ms(ctx)
+    local latency = (ngx_now() - ngx.req.start_time()) * 1000
+    local upstream_latency, apisix_latency = nil, latency
+
+    if ctx.var.upstream_response_time then
+        upstream_latency = ctx.var.upstream_response_time * 1000
+        apisix_latency = apisix_latency - upstream_latency
+
+        -- The latency might be negative, as Nginx uses different time measurements in
+        -- different metrics.
+        -- See https://github.com/apache/apisix/issues/5146#issuecomment-928919399
+        if apisix_latency < 0 then
+            apisix_latency = 0
+        end
+    end
+
+    return latency, upstream_latency, apisix_latency
+end
+_M.latency_details_in_ms = latency_details_in_ms
+
+
 local function get_full_log(ngx, conf)
     local ctx = ngx.ctx.api_ctx
     local var = ctx.var
@@ -96,6 +118,8 @@ local function get_full_log(ngx, conf)
         }
     end
 
+    local latency, upstream_latency, apisix_latency = latency_details_in_ms(ctx)
+
     local log =  {
         request = {
             url = url,
@@ -120,7 +144,9 @@ local function get_full_log(ngx, conf)
         consumer = consumer,
         client_ip = core.request.get_remote_client_ip(ngx.ctx.api_ctx),
         start_time = ngx.req.start_time() * 1000,
-        latency = (ngx_now() - ngx.req.start_time()) * 1000
+        latency = latency,
+        upstream_latency = upstream_latency,
+        apisix_latency = apisix_latency
     }
 
     if ctx.resp_body then
@@ -185,26 +211,6 @@ function _M.get_req_original(ctx, conf)
 end
 
 
-function _M.latency_details_in_ms(ctx)
-    local latency = (ngx_now() - ngx.req.start_time()) * 1000
-    local upstream_latency, apisix_latency = nil, latency
-
-    if ctx.var.upstream_response_time then
-        upstream_latency = ctx.var.upstream_response_time * 1000
-        apisix_latency = apisix_latency - upstream_latency
-
-        -- The latency might be negative, as Nginx uses different time measurements in
-        -- different metrics.
-        -- See https://github.com/apache/apisix/issues/5146#issuecomment-928919399
-        if apisix_latency < 0 then
-            apisix_latency = 0
-        end
-    end
-
-    return latency, upstream_latency, apisix_latency
-end
-
-
 function _M.check_log_schema(conf)
     if conf.include_req_body_expr then
         local ok, err = expr.new(conf.include_req_body_expr)
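
The change above moves latency_details_in_ms ahead of get_full_log so the full log can reuse it; the helper stays exported for custom logger plugins. As a minimal sketch only (not part of this commit; the plugin skeleton and log message are hypothetical), a logger plugin's log phase could consume the helper like this:

    local core     = require("apisix.core")
    local log_util = require("apisix.utils.log-util")

    -- hypothetical plugin skeleton, for illustration only
    local _M = {}

    function _M.log(conf, ctx)
        -- latency: total request time in ms
        -- upstream_latency: nil when no upstream was contacted
        -- apisix_latency: latency minus upstream_latency, clamped to >= 0
        local latency, upstream_latency, apisix_latency =
            log_util.latency_details_in_ms(ctx)

        core.log.info("latency: ", latency,
                      ", upstream_latency: ", tostring(upstream_latency),
                      ", apisix_latency: ", apisix_latency)
    end

    return _M
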
diff --git a/t/plugin/http-logger-log-format.t b/t/plugin/http-logger-log-format.t
index a9d05b2..e1121b7 100644
--- a/t/plugin/http-logger-log-format.t
+++ b/t/plugin/http-logger-log-format.t
@@ -431,7 +431,7 @@ apikey: auth-one
 --- grep_error_log eval
 qr/request log: \{.+\}/
 --- grep_error_log_out eval
-qr/\Q{"client_ip":"127.0.0.1","consumer":{"username":"jack"},"latency":\E[^,]+\Q,"request":{"headers":{"apikey":"auth-one","connection":"close","host":"localhost"},"method":"GET","querystring":{},"size":\E\d+\Q,"uri":"\/hello","url":"http:\/\/localhost:1984\/hello"},"response":{"headers":{"connection":"close","content-length":"\E\d+\Q","content-type":"text\/plain","server":"\E[^"]+\Q"},"size":\E\d+\Q,"status":200},"route_id":"1","server":{"hostname":"\E[^"]+\Q","version":"\E[^"]+\Q"},"se
 [...]
+qr/\Q{"apisix_latency":\E[^,]+\Q,"client_ip":"127.0.0.1","consumer":{"username":"jack"},"latency":\E[^,]+\Q,"request":{"headers":{"apikey":"auth-one","connection":"close","host":"localhost"},"method":"GET","querystring":{},"size":\E\d+\Q,"uri":"\/hello","url":"http:\/\/localhost:1984\/hello"},"response":{"headers":{"connection":"close","content-length":"\E\d+\Q","content-type":"text\/plain","server":"\E[^"]+\Q"},"size":\E\d+\Q,"status":200},"route_id":"1","server":{"hostname":"\E[^"]+\Q"
 [...]
 --- wait: 0.5
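
In the expected log line the JSON keys appear in alphabetical order, so the new apisix_latency field now leads the asserted pattern. An entry matched by the updated pattern would begin roughly as follows (the numbers are illustrative placeholders, not values captured from the test run):

    {"apisix_latency":0.42,"client_ip":"127.0.0.1","consumer":{"username":"jack"},"latency":3.5,"request":{...}}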
 
 
