This is an automated email from the ASF dual-hosted git repository.

spacewander pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git


The following commit(s) were added to refs/heads/master by this push:
     new e587bc4ab feat: allow each logger to define custom log format in its conf (#8806)
e587bc4ab is described below

commit e587bc4abb881306080b44e66b7705fa7f16be08
Author: 罗泽轩 <[email protected]>
AuthorDate: Wed Feb 8 11:37:55 2023 +0800

    feat: allow each logger to define custom log format in its conf (#8806)
---
 apisix/plugins/clickhouse-logger.lua           |   1 +
 apisix/plugins/elasticsearch-logger.lua        |   1 +
 apisix/plugins/file-logger.lua                 |   1 +
 apisix/plugins/google-cloud-logging.lua        |   1 +
 apisix/plugins/http-logger.lua                 |   1 +
 apisix/plugins/kafka-logger.lua                |   1 +
 apisix/plugins/loggly.lua                      |   1 +
 apisix/plugins/rocketmq-logger.lua             |   1 +
 apisix/plugins/skywalking-logger.lua           |   1 +
 apisix/plugins/sls-logger.lua                  |   1 +
 apisix/plugins/splunk-hec-logging.lua          |   1 +
 apisix/plugins/syslog.lua                      |   1 +
 apisix/plugins/tcp-logger.lua                  |   1 +
 apisix/plugins/tencent-cloud-cls.lua           |   1 +
 apisix/plugins/udp-logger.lua                  |   1 +
 apisix/stream/plugins/syslog.lua               |   1 +
 apisix/utils/log-util.lua                      |   7 +-
 docs/en/latest/plugins/clickhouse-logger.md    |   1 +
 docs/en/latest/plugins/elasticsearch-logger.md |   1 +
 docs/en/latest/plugins/file-logger.md          |   1 +
 docs/en/latest/plugins/google-cloud-logging.md |   1 +
 docs/en/latest/plugins/http-logger.md          |   1 +
 docs/en/latest/plugins/kafka-logger.md         |   1 +
 docs/en/latest/plugins/loggly.md               |   1 +
 docs/en/latest/plugins/rocketmq-logger.md      |   1 +
 docs/en/latest/plugins/skywalking-logger.md    |   1 +
 docs/en/latest/plugins/sls-logger.md           |   1 +
 docs/en/latest/plugins/splunk-hec-logging.md   |   1 +
 docs/en/latest/plugins/syslog.md               |   1 +
 docs/en/latest/plugins/tcp-logger.md           |   1 +
 docs/en/latest/plugins/tencent-cloud-cls.md    |   3 +-
 docs/en/latest/plugins/udp-logger.md           |   1 +
 docs/zh/latest/plugins/clickhouse-logger.md    |   1 +
 docs/zh/latest/plugins/elasticsearch-logger.md |   1 +
 docs/zh/latest/plugins/file-logger.md          |   1 +
 docs/zh/latest/plugins/google-cloud-logging.md |   3 +-
 docs/zh/latest/plugins/http-logger.md          |   1 +
 docs/zh/latest/plugins/kafka-logger.md         |   1 +
 docs/zh/latest/plugins/loggly.md               |   1 +
 docs/zh/latest/plugins/rocketmq-logger.md      |   1 +
 docs/zh/latest/plugins/skywalking-logger.md    |   1 +
 docs/zh/latest/plugins/sls-logger.md           |   1 +
 docs/zh/latest/plugins/splunk-hec-logging.md   |   1 +
 docs/zh/latest/plugins/syslog.md               |   1 +
 docs/zh/latest/plugins/tcp-logger.md           |   1 +
 docs/zh/latest/plugins/tencent-cloud-cls.md    |   1 +
 docs/zh/latest/plugins/udp-logger.md           |   1 +
 t/plugin/clickhouse-logger.t                   |  56 +++++++++++
 t/plugin/elasticsearch-logger.t                |  86 ++++++++++++++++
 t/plugin/file-logger.t                         | 131 +++++++++++++++++++++++++
 t/plugin/google-cloud-logging2.t               | 104 ++++++++++++++++++++
 t/plugin/http-logger-log-format.t              |  56 +++++++++++
 t/plugin/kafka-logger-log-format.t             |  56 +++++++++++
 t/plugin/loggly.t                              |  70 +++++++++++++
 t/plugin/rocketmq-logger-log-format.t          |  55 +++++++++++
 t/plugin/skywalking-logger.t                   |  53 ++++++++++
 t/plugin/sls-logger.t                          |  62 ++++++++++++
 t/plugin/splunk-hec-logging.t                  |  74 ++++++++++++++
 t/plugin/syslog.t                              |  68 +++++++++++++
 t/plugin/tcp-logger.t                          |  86 ++++++++++++++++
 t/plugin/tencent-cloud-cls.t                   |  91 +++++++++++++++++
 t/plugin/udp-logger.t                          |  86 ++++++++++++++++
 t/stream-plugin/syslog.t                       |  68 +++++++++++++
 63 files changed, 1254 insertions(+), 5 deletions(-)
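
For context, the change lets each logger plugin carry its own `log_format` in its conf, instead of relying only on the format set in the plugin metadata. A minimal sketch of what such a conf table might look like for http-logger (the endpoint and the `$`-prefixed fields are illustrative examples, not taken from this commit):

    -- hypothetical http-logger conf carrying a per-plugin log_format
    local conf = {
        uri = "http://127.0.0.1:3000/logs",   -- illustrative log receiver
        log_format = {
            host      = "$host",              -- resolved from an Nginx/APISIX variable
            client_ip = "$remote_addr",
            timestamp = "$time_iso8601",
            source    = "http-logger",        -- plain strings are kept as-is
        },
    }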

diff --git a/apisix/plugins/clickhouse-logger.lua b/apisix/plugins/clickhouse-logger.lua
index d56289835..2c1208f92 100644
--- a/apisix/plugins/clickhouse-logger.lua
+++ b/apisix/plugins/clickhouse-logger.lua
@@ -40,6 +40,7 @@ local schema = {
         timeout = {type = "integer", minimum = 1, default = 3},
         name = {type = "string", default = "clickhouse logger"},
         ssl_verify = {type = "boolean", default = true},
+        log_format = {type = "object"},
     },
     oneOf = {
         {required = {"endpoint_addr", "user", "password", "database", "logtable"}},
diff --git a/apisix/plugins/elasticsearch-logger.lua b/apisix/plugins/elasticsearch-logger.lua
index 9e416d32c..d5a7d2fc5 100644
--- a/apisix/plugins/elasticsearch-logger.lua
+++ b/apisix/plugins/elasticsearch-logger.lua
@@ -52,6 +52,7 @@ local schema = {
             },
             required = {"index"}
         },
+        log_format = {type = "object"},
         auth = {
             type = "object",
             properties = {
diff --git a/apisix/plugins/file-logger.lua b/apisix/plugins/file-logger.lua
index 076a9e7e7..97140abfa 100644
--- a/apisix/plugins/file-logger.lua
+++ b/apisix/plugins/file-logger.lua
@@ -30,6 +30,7 @@ local schema = {
         path = {
             type = "string"
         },
+        log_format = {type = "object"},
         include_resp_body = {type = "boolean", default = false},
         include_resp_body_expr = {
             type = "array",
diff --git a/apisix/plugins/google-cloud-logging.lua b/apisix/plugins/google-cloud-logging.lua
index 67fa82446..3fb34ab43 100644
--- a/apisix/plugins/google-cloud-logging.lua
+++ b/apisix/plugins/google-cloud-logging.lua
@@ -86,6 +86,7 @@ local schema = {
             type = "string",
             default = "apisix.apache.org%2Flogs"
         },
+        log_format = {type = "object"},
     },
     oneOf = {
         { required = { "auth_config" } },
diff --git a/apisix/plugins/http-logger.lua b/apisix/plugins/http-logger.lua
index 2de099535..399d50da1 100644
--- a/apisix/plugins/http-logger.lua
+++ b/apisix/plugins/http-logger.lua
@@ -33,6 +33,7 @@ local schema = {
         uri = core.schema.uri_def,
         auth_header = {type = "string"},
         timeout = {type = "integer", minimum = 1, default = 3},
+        log_format = {type = "object"},
         include_req_body = {type = "boolean", default = false},
         include_resp_body = {type = "boolean", default = false},
         include_resp_body_expr = {
diff --git a/apisix/plugins/kafka-logger.lua b/apisix/plugins/kafka-logger.lua
index a4bfcda1b..2abbd1fce 100644
--- a/apisix/plugins/kafka-logger.lua
+++ b/apisix/plugins/kafka-logger.lua
@@ -37,6 +37,7 @@ local schema = {
             default = "default",
             enum = {"default", "origin"},
         },
+        log_format = {type = "object"},
         -- deprecated, use "brokers" instead
         broker_list = {
             type = "object",
diff --git a/apisix/plugins/loggly.lua b/apisix/plugins/loggly.lua
index a276d555d..92fafb782 100644
--- a/apisix/plugins/loggly.lua
+++ b/apisix/plugins/loggly.lua
@@ -85,6 +85,7 @@ local schema = {
             type = "boolean",
             default = true
         },
+        log_format = {type = "object"},
         severity_map = {
             type = "object",
             description = "upstream response code vs syslog severity mapping",
diff --git a/apisix/plugins/rocketmq-logger.lua b/apisix/plugins/rocketmq-logger.lua
index f1fa54b46..3262fee2b 100644
--- a/apisix/plugins/rocketmq-logger.lua
+++ b/apisix/plugins/rocketmq-logger.lua
@@ -46,6 +46,7 @@ local schema = {
         topic = {type = "string"},
         key = {type = "string"},
         tag = {type = "string"},
+        log_format = {type = "object"},
         timeout = {type = "integer", minimum = 1, default = 3},
         use_tls = {type = "boolean", default = false},
         access_key = {type = "string", default = ""},
diff --git a/apisix/plugins/skywalking-logger.lua b/apisix/plugins/skywalking-logger.lua
index 605c8f9ad..1c1ba590a 100644
--- a/apisix/plugins/skywalking-logger.lua
+++ b/apisix/plugins/skywalking-logger.lua
@@ -36,6 +36,7 @@ local schema = {
         endpoint_addr = core.schema.uri_def,
         service_name = {type = "string", default = "APISIX"},
         service_instance_name = {type = "string", default = "APISIX Instance Name"},
+        log_format = {type = "object"},
         timeout = {type = "integer", minimum = 1, default = 3},
         include_req_body = {type = "boolean", default = false},
     },
diff --git a/apisix/plugins/sls-logger.lua b/apisix/plugins/sls-logger.lua
index 8cbe009f3..6c2415ac6 100644
--- a/apisix/plugins/sls-logger.lua
+++ b/apisix/plugins/sls-logger.lua
@@ -34,6 +34,7 @@ local schema = {
     properties = {
         include_req_body = {type = "boolean", default = false},
         timeout = {type = "integer", minimum = 1, default= 5000},
+        log_format = {type = "object"},
         host = {type = "string"},
         port = {type = "integer"},
         project = {type = "string"},
diff --git a/apisix/plugins/splunk-hec-logging.lua b/apisix/plugins/splunk-hec-logging.lua
index 5f3902c69..8de8be6ec 100644
--- a/apisix/plugins/splunk-hec-logging.lua
+++ b/apisix/plugins/splunk-hec-logging.lua
@@ -56,6 +56,7 @@ local schema = {
             type = "boolean",
             default = true
         },
+        log_format = {type = "object"},
     },
     required = { "endpoint" },
 }
diff --git a/apisix/plugins/syslog.lua b/apisix/plugins/syslog.lua
index 5c0be9737..896c19f44 100644
--- a/apisix/plugins/syslog.lua
+++ b/apisix/plugins/syslog.lua
@@ -33,6 +33,7 @@ local schema = {
         sock_type = {type = "string", default = "tcp", enum = {"tcp", "udp"}},
         pool_size = {type = "integer", minimum = 5, default = 5},
         tls = {type = "boolean", default = false},
+        log_format = {type = "object"},
         include_req_body = {type = "boolean", default = false}
     },
     required = {"host", "port"}
diff --git a/apisix/plugins/tcp-logger.lua b/apisix/plugins/tcp-logger.lua
index 28e925f5e..e0bf9df30 100644
--- a/apisix/plugins/tcp-logger.lua
+++ b/apisix/plugins/tcp-logger.lua
@@ -32,6 +32,7 @@ local schema = {
         tls = {type = "boolean", default = false},
         tls_options = {type = "string"},
         timeout = {type = "integer", minimum = 1, default= 1000},
+        log_format = {type = "object"},
         include_req_body = {type = "boolean", default = false}
     },
     required = {"host", "port"}
diff --git a/apisix/plugins/tencent-cloud-cls.lua b/apisix/plugins/tencent-cloud-cls.lua
index 8d1f2d001..cc01a7190 100644
--- a/apisix/plugins/tencent-cloud-cls.lua
+++ b/apisix/plugins/tencent-cloud-cls.lua
@@ -41,6 +41,7 @@ local schema = {
         include_req_body = { type = "boolean", default = false },
         include_resp_body = { type = "boolean", default = false },
         global_tag = { type = "object" },
+        log_format = {type = "object"},
     },
     encrypt_fields = {"secret_key"},
     required = { "cls_host", "cls_topic", "secret_id", "secret_key" }
diff --git a/apisix/plugins/udp-logger.lua b/apisix/plugins/udp-logger.lua
index 45bfb58ed..e220cb19e 100644
--- a/apisix/plugins/udp-logger.lua
+++ b/apisix/plugins/udp-logger.lua
@@ -30,6 +30,7 @@ local schema = {
         host = {type = "string"},
         port = {type = "integer", minimum = 0},
         timeout = {type = "integer", minimum = 1, default = 3},
+        log_format = {type = "object"},
         include_req_body = {type = "boolean", default = false}
     },
     required = {"host", "port"}
diff --git a/apisix/stream/plugins/syslog.lua b/apisix/stream/plugins/syslog.lua
index 4f99cab8b..2c76ca693 100644
--- a/apisix/stream/plugins/syslog.lua
+++ b/apisix/stream/plugins/syslog.lua
@@ -30,6 +30,7 @@ local schema = {
         flush_limit = {type = "integer", minimum = 1, default = 4096},
         drop_limit = {type = "integer", default = 1048576},
         timeout = {type = "integer", minimum = 1, default = 3000},
+        log_format = {type = "object"},
         sock_type = {type = "string", default = "tcp", enum = {"tcp", "udp"}},
         pool_size = {type = "integer", minimum = 5, default = 5},
         tls = {type = "boolean", default = false}
diff --git a/apisix/utils/log-util.lua b/apisix/utils/log-util.lua
index 500c24238..4aecf290e 100644
--- a/apisix/utils/log-util.lua
+++ b/apisix/utils/log-util.lua
@@ -217,11 +217,12 @@ function _M.get_log_entry(plugin_name, conf, ctx)
     local entry
     local customized = false
 
-    if metadata and metadata.value.log_format
+    local has_meta_log_format = metadata and metadata.value.log_format
         and core.table.nkeys(metadata.value.log_format) > 0
-    then
+
+    if conf.log_format or has_meta_log_format then
         customized = true
-        entry = get_custom_format_log(ctx, metadata.value.log_format)
+        entry = get_custom_format_log(ctx, conf.log_format or metadata.value.log_format)
     else
         if is_http then
             entry = get_full_log(ngx, conf)
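
The log-util.lua hunk above is the core behavioural change: a `log_format` found in the plugin conf now takes precedence over one configured in the plugin metadata, and the full default log is only built when neither is set. A reduced, standalone sketch of that selection order (not the actual module; `conf` and `metadata` stand in for the real objects):

    -- simplified illustration of the precedence introduced above
    local function pick_log_format(conf, metadata)
        if conf.log_format then
            return conf.log_format                 -- per-plugin conf wins
        end
        local meta_format = metadata and metadata.value.log_format
        if meta_format and next(meta_format) ~= nil then
            return meta_format                     -- fall back to plugin metadata
        end
        return nil                                 -- caller builds the full default entry
    end
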
diff --git a/docs/en/latest/plugins/clickhouse-logger.md b/docs/en/latest/plugins/clickhouse-logger.md
index 465ebb945..96cb50a7e 100644
--- a/docs/en/latest/plugins/clickhouse-logger.md
+++ b/docs/en/latest/plugins/clickhouse-logger.md
@@ -44,6 +44,7 @@ The `clickhouse-logger` Plugin is used to push logs to 
[ClickHouse](https://clic
 | timeout       | integer | False    | 3                   | [1,...]      | 
Time to keep the connection alive for after sending a request. |
 | name          | string  | False    | "clickhouse logger" |              | 
Unique identifier for the logger.                              |
 | ssl_verify    | boolean | False    | true                | [true,false] | 
When set to `true`, verifies SSL.                              |
+| log_format       | object  | False    |              |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 
 NOTE: `encrypt_fields = {"password"}` is also defined in the schema, which 
means that the field will be stored encrypted in etcd. See [encrypted storage 
fields](../plugin-develop.md#encrypted-storage-fields).
 
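As the attribute description above says, values that start with `$` are resolved from APISIX or Nginx variables when the log entry is built, while plain strings are copied through unchanged. A tiny illustration of that mapping (the variable values are made up; the real resolution happens inside apisix/utils/log-util.lua, not in plugin code):

    -- illustrative only: how a $-prefixed log_format maps to a log entry
    local log_format = { host = "$host", client_ip = "$remote_addr", level = "info" }
    local vars = { host = "example.com", remote_addr = "127.0.0.1" }  -- stand-in for ngx.var
    local entry = {}
    for name, value in pairs(log_format) do
        if type(value) == "string" and value:sub(1, 1) == "$" then
            entry[name] = vars[value:sub(2)]   -- "$host" -> vars.host
        else
            entry[name] = value                -- literal values are kept as-is
        end
    end
    -- entry: { host = "example.com", client_ip = "127.0.0.1", level = "info" }
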
diff --git a/docs/en/latest/plugins/elasticsearch-logger.md b/docs/en/latest/plugins/elasticsearch-logger.md
index b9e239dc9..9f089628f 100644
--- a/docs/en/latest/plugins/elasticsearch-logger.md
+++ b/docs/en/latest/plugins/elasticsearch-logger.md
@@ -42,6 +42,7 @@ When the Plugin is enabled, APISIX will serialize the request 
context informatio
 | field         | array   | True     |                             | 
Elasticsearch `field` configuration.                          |
 | field.index   | string  | True     |                             | 
Elasticsearch [_index 
field](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-index-field.html#mapping-index-field).
 |
 | field.type    | string  | False    | Elasticsearch default value | 
Elasticsearch [_type 
field](https://www.elastic.co/guide/en/elasticsearch/reference/7.17/mapping-type-field.html#mapping-type-field).
 |
+| log_format | object | False    |          | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | auth          | array   | False    |                             | 
Elasticsearch 
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
 configuration. |
 | auth.username | string  | True     |                             | 
Elasticsearch 
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
 username. |
 | auth.password | string  | True     |                             | 
Elasticsearch 
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
 password. |
diff --git a/docs/en/latest/plugins/file-logger.md b/docs/en/latest/plugins/file-logger.md
index 1f2c94b35..6b89e6e4c 100644
--- a/docs/en/latest/plugins/file-logger.md
+++ b/docs/en/latest/plugins/file-logger.md
@@ -46,6 +46,7 @@ The `file-logger` Plugin is used to push log streams to a 
specific location.
 | Name | Type   | Required | Description   |
 | ---- | ------ | -------- | ------------- |
 | path | string | True     | Log file path. |
+| log_format | object | False    | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | include_resp_body      | boolean | False     | When set to `true` includes 
the response body in the log file.                                              
                                                                                
                                  |
 | include_resp_body_expr | array   | False     | When the `include_resp_body` 
attribute is set to `true`, use this to filter based on 
[lua-resty-expr](https://github.com/api7/lua-resty-expr). If present, only logs 
the response into file if the expression evaluates to `true`. |
 
diff --git a/docs/en/latest/plugins/google-cloud-logging.md b/docs/en/latest/plugins/google-cloud-logging.md
index c6c954a7e..eaf2536cc 100644
--- a/docs/en/latest/plugins/google-cloud-logging.md
+++ b/docs/en/latest/plugins/google-cloud-logging.md
@@ -46,6 +46,7 @@ This plugin also allows to push logs as a batch to your 
Google Cloud Logging Ser
 | ssl_verify              | False    | true                                    
                                                                                
                                                                             | 
When set to `true`, enables SSL verification as mentioned in [OpenResty 
docs](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake).       
           |
 | resource                | False    | {"type": "global"}                      
                                                                                
                                                                             | 
Google monitor resource. See 
[MonitoredResource](https://cloud.google.com/logging/docs/reference/v2/rest/v2/MonitoredResource)
 for more details.                   |
 | log_id                  | False    | apisix.apache.org%2Flogs                
                                                                                
                                                                             | 
Google Cloud logging ID. See 
[LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry) 
for details.                                          |
+| log_format       | False    |                             | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 
 NOTE: `encrypt_fields = {"auth_config.private_key"}` is also defined in the 
schema, which means that the field will be stored encrypted in etcd. See 
[encrypted storage fields](../plugin-develop.md#encrypted-storage-fields).
 
diff --git a/docs/en/latest/plugins/http-logger.md b/docs/en/latest/plugins/http-logger.md
index f46e2b077..582f5a079 100644
--- a/docs/en/latest/plugins/http-logger.md
+++ b/docs/en/latest/plugins/http-logger.md
@@ -40,6 +40,7 @@ This will allow the ability to send log data requests as JSON 
objects to monitor
 | uri                    | string  | True     |               |                
      | URI of the HTTP/HTTPS server.                                           
                                                                                
                                                                 |
 | auth_header            | string  | False    |               |                
      | Authorization headers if required.                                      
                                                                                
                                                                 |
 | timeout                | integer | False    | 3             | [1,...]        
      | Time to keep the connection alive for after sending a request.          
                                                                                
                                                                 |
+| log_format | object | False    |      |               | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | include_req_body       | boolean | False    | false         | [false, true]  
      | When set to `true` includes the request body in the log. If the request 
body is too big to be kept in the memory, it can't be logged due to Nginx's 
limitations.                                                         |
 | include_resp_body      | boolean | False    | false         | [false, true]  
      | When set to `true` includes the response body in the log.               
                                                                                
                                                                 |
 | include_resp_body_expr | array   | False    |               |                
      | When the `include_resp_body` attribute is set to `true`, use this to 
filter based on [lua-resty-expr](https://github.com/api7/lua-resty-expr). If 
present, only logs the response if the expression evaluates to `true`. |
diff --git a/docs/en/latest/plugins/kafka-logger.md b/docs/en/latest/plugins/kafka-logger.md
index 24ec21de5..0155d4383 100644
--- a/docs/en/latest/plugins/kafka-logger.md
+++ b/docs/en/latest/plugins/kafka-logger.md
@@ -52,6 +52,7 @@ It might take some time to receive the log data. It will be 
automatically sent a
 | timeout                | integer | False    | 3              | [1,...]       
        | Timeout for the upstream to send data.                                
                                                                                
                                                                                
                                                                                
                           |
 | name                   | string  | False    | "kafka logger" |               
        | Unique identifier for the batch processor.                            
                                                                                
                                                                                
                                                                                
                           |
 | meta_format            | enum    | False    | "default"      | 
["default","origin"] | Format to collect the request information. Setting to 
`default` collects the information in JSON format and `origin` collects the 
information with the original HTTP request. See 
[examples](#meta_format-example) below.                                         
                                                                               |
+| log_format | object | False    |      |               | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | include_req_body       | boolean | False    | false          | [false, true] 
        | When set to `true` includes the request body in the log. If the 
request body is too big to be kept in the memory, it can't be logged due to 
Nginx's limitations.                                                            
                                                                                
                                     |
 | include_req_body_expr  | array   | False    |                |               
        | Filter for when the `include_req_body` attribute is set to `true`. 
Request body is only logged when the expression set here evaluates to `true`. 
See [lua-resty-expr](https://github.com/api7/lua-resty-expr) for more.          
                                                                                
                                |
 | include_resp_body      | boolean | False    | false          | [false, true] 
        | When set to `true` includes the response body in the log.             
                                                                                
                                                                                
                                                                                
                           |
diff --git a/docs/en/latest/plugins/loggly.md b/docs/en/latest/plugins/loggly.md
index a870f5093..2b83b3426 100644
--- a/docs/en/latest/plugins/loggly.md
+++ b/docs/en/latest/plugins/loggly.md
@@ -43,6 +43,7 @@ When the maximum batch size is exceeded, the data in the 
queue is pushed to Logg
 | severity               | string (enum) | False    | INFO    | Syslog log 
event severity level. Choose between: `DEBUG`, `INFO`, `NOTICE`, `WARNING`, 
`ERR`, `CRIT`, `ALERT`, and `EMEGR`.                                            
                                                   |
 | severity_map           | object        | False    | nil     | A way to map 
upstream HTTP response codes to Syslog severity. Key-value pairs where keys are 
the HTTP response codes and the values are the Syslog severity levels. For 
example `{"410": "CRIT"}`.                       |
 | tags                   | array         | False    |         | Metadata to be 
included with any event log to aid in segmentation and filtering.               
                                                                                
                                          |
+| log_format       | object  | False    |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | include_req_body       | boolean       | False    | false   | When set to 
`true` includes the request body in the log. If the request body is too big to 
be kept in the memory, it can't be logged due to Nginx's limitations.           
                                              |
 | include_resp_body      | boolean       | False    | false   | When set to 
`true` includes the response body in the log.                                   
                                                                                
                                              |
 | include_resp_body_expr | array         | False    |         | When the 
`include_resp_body` attribute is set to `true`, use this to filter based on 
[lua-resty-expr](https://github.com/api7/lua-resty-expr). If present, only logs 
the response if the expression evaluates to `true`. |
diff --git a/docs/en/latest/plugins/rocketmq-logger.md b/docs/en/latest/plugins/rocketmq-logger.md
index 003724b81..b4f99bd24 100644
--- a/docs/en/latest/plugins/rocketmq-logger.md
+++ b/docs/en/latest/plugins/rocketmq-logger.md
@@ -40,6 +40,7 @@ It might take some time to receive the log data. It will be 
automatically sent a
 | topic                  | string  | True     |                   |            
           | Target topic to push the data to.                                  
                                                                                
                                                                       |
 | key                    | string  | False    |                   |            
           | Key of the messages.                                               
                                                                                
                                                                       |
 | tag                    | string  | False    |                   |            
           | Tag of the messages.                                               
                                                                                
                                                                       |
+| log_format | object | False    |         |            | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | timeout                | integer | False    | 3                 | [1,...]    
           | Timeout for the upstream to send data.                             
                                                                                
                                                                       |
 | use_tls                | boolean | False    | false             |            
           | When set to `true`, uses TLS.                                      
                                                                                
                                                                       |
 | access_key             | string  | False    | ""                |            
           | Access key for ACL. Setting to an empty string will disable the 
ACL.                                                                            
                                                                          |
diff --git a/docs/en/latest/plugins/skywalking-logger.md b/docs/en/latest/plugins/skywalking-logger.md
index 411fc5f98..a4820485c 100644
--- a/docs/en/latest/plugins/skywalking-logger.md
+++ b/docs/en/latest/plugins/skywalking-logger.md
@@ -39,6 +39,7 @@ If there is an existing tracing context, it sets up the 
trace-log correlation au
 | endpoint_addr         | string  | True     |                        |        
       | URI of the SkyWalking OAP server.                                      
                                      |
 | service_name          | string  | False    | "APISIX"               |        
       | Service name for the SkyWalking reporter.                              
                                      |
 | service_instance_name | string  | False    | "APISIX Instance Name" |        
       | Service instance name for the SkyWalking reporter. Set it to 
`$hostname` to directly get the local hostname. |
+| log_format | object | False    |         |            | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | timeout               | integer | False    | 3                      | 
[1,...]       | Time to keep the connection alive for after sending a request.  
                                             |
 | name                  | string  | False    | "skywalking logger"    |        
       | Unique identifier to identify the logger.                              
                                      |
 | include_req_body      | boolean | False    | false                  | 
[false, true] | When set to `true` includes the request body in the log.        
                                             |
diff --git a/docs/en/latest/plugins/sls-logger.md b/docs/en/latest/plugins/sls-logger.md
index 53ea083ba..bafd2809d 100644
--- a/docs/en/latest/plugins/sls-logger.md
+++ b/docs/en/latest/plugins/sls-logger.md
@@ -40,6 +40,7 @@ It might take some time to receive the log data. It will be 
automatically sent a
 | host              | True     | IP address or the hostname of the TCP server. 
See [Alibaba Cloud log service 
documentation](https://www.alibabacloud.com/help/en/log-service/latest/endpoints)
 for details. Use IP address instead of domain. |
 | port              | True     | Target upstream port. Defaults to `10009`.    
                                                                                
                                                                                
                                  |
 | timeout           | False    | Timeout for the upstream to send data.        
                                                                                
                                                                                
                                  |
+| log_format       | False    | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | project           | True     | Project name in Alibaba Cloud log service. 
Create SLS before using this Plugin.                                            
                                                                                
                                         |
 | logstore          | True     | logstore name in Ali Cloud log service. 
Create SLS before using this Plugin.                                            
                                                                                
                                        |
 | access_key_id     | True     | AccessKey ID in Alibaba Cloud. See 
[Authorization](https://www.alibabacloud.com/help/en/log-service/latest/create-a-ram-user-and-authorize-the-ram-user-to-access-log-service)
 for more details.                                                              
       |
diff --git a/docs/en/latest/plugins/splunk-hec-logging.md b/docs/en/latest/plugins/splunk-hec-logging.md
index 69addb145..5a3d670b1 100644
--- a/docs/en/latest/plugins/splunk-hec-logging.md
+++ b/docs/en/latest/plugins/splunk-hec-logging.md
@@ -43,6 +43,7 @@ When the Plugin is enabled, APISIX will serialize the request 
context informatio
 | endpoint.channel | False    |         | Splunk HEC send data channel 
identifier. Read more: [About HTTP Event Collector Indexer 
Acknowledgment](https://docs.splunk.com/Documentation/Splunk/8.2.3/Data/AboutHECIDXAck).
 |
 | endpoint.timeout | False    | 10      | Splunk HEC send data timeout in 
seconds.                                                                        
                                                                 |
 | ssl_verify       | False    | true    | When set to `true` enables SSL 
verification as per [OpenResty 
docs](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake).       
                                   |
+| log_format       | False    |                             | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 
 This Plugin supports using batch processors to aggregate and process entries 
(logs/data) in a batch. This avoids the need for frequently submitting the 
data. The batch processor submits data every `5` seconds or when the data in 
the queue reaches `1000`. See [Batch 
Processor](../batch-processor.md#configuration) for more information or setting 
your custom configuration.
 
diff --git a/docs/en/latest/plugins/syslog.md b/docs/en/latest/plugins/syslog.md
index 12fce0b7f..494250851 100644
--- a/docs/en/latest/plugins/syslog.md
+++ b/docs/en/latest/plugins/syslog.md
@@ -45,6 +45,7 @@ Logs can be set as JSON objects.
 | drop_limit       | integer | False    | 1048576      |               | 
Maximum size of the buffer (KB) and the current message before the current 
message is dropped because of the size limit. |
 | sock_type        | string  | False    | "tcp"        | ["tcp", "udp] | 
Transport layer protocol to use.                                                
                                         |
 | pool_size        | integer | False    | 5            | [5, ...]      | 
Keep-alive pool size used by `sock:keepalive`.                                  
                                         |
+| log_format       | object  | False    |              |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | include_req_body | boolean | False    | false        |               | When 
set to `true` includes the request body in the log.                             
                                    |
 
 This Plugin supports using batch processors to aggregate and process entries 
(logs/data) in a batch. This avoids the need for frequently submitting the 
data. The batch processor submits data every `5` seconds or when the data in 
the queue reaches `1000`. See [Batch 
Processor](../batch-processor.md#configuration) for more information or setting 
your custom configuration.
diff --git a/docs/en/latest/plugins/tcp-logger.md b/docs/en/latest/plugins/tcp-logger.md
index a5a722a80..c9d68ef5e 100644
--- a/docs/en/latest/plugins/tcp-logger.md
+++ b/docs/en/latest/plugins/tcp-logger.md
@@ -42,6 +42,7 @@ This plugin also allows to push logs as a batch to your 
external TCP server. It
 | host             | string  | True     |         |              | IP address 
or the hostname of the TCP server.            |
 | port             | integer | True     |         | [0,...]      | Target 
upstream port.                                    |
 | timeout          | integer | False    | 1000    | [1,...]      | Timeout for 
the upstream to send data.                   |
+| log_format       | object  | False    |         |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | tls              | boolean | False    | false   |              | When set to 
`true` performs SSL verification.            |
 | tls_options      | string  | False    |         |              | TLS 
options.                                             |
 | include_req_body | boolean | False    | false   |              | When set to 
`true` includes the request body in the log. |
diff --git a/docs/en/latest/plugins/tencent-cloud-cls.md b/docs/en/latest/plugins/tencent-cloud-cls.md
index fb3210100..c88d223a1 100644
--- a/docs/en/latest/plugins/tencent-cloud-cls.md
+++ b/docs/en/latest/plugins/tencent-cloud-cls.md
@@ -30,7 +30,7 @@ description: This document contains information about the 
Apache APISIX tencent-
 
 ## Description
 
-The `tencent-cloud-cls` Plugin uses [TencentCloud CLS](https://cloud.tencent.com/document/product/614)API to forward APISIX logs to your topic.
+The `tencent-cloud-cls` Plugin uses [TencentCloud CLS](https://cloud.tencent.com/document/product/614) API to forward APISIX logs to your topic.
 
 ## Attributes
 
@@ -44,6 +44,7 @@ The `tencent-cloud-cls` Plugin uses [TencentCloud 
CLS](https://cloud.tencent.com
 | include_req_body  | boolean | No       | false   | [false, true] | When set 
to `true` includes the request body in the log. If the request body is too big 
to be kept in the memory, it can't be logged due to NGINX's limitations. |
 | include_resp_body | boolean | No       | false   | [false, true] | When set 
to `true` includes the response body in the log.                                
                                                                        |
 | global_tag        | object  | No       |         |               | kv pairs 
in JSON,send with each log.                                                     
                                                                        |
+| log_format       | object  | No    |              |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 
 NOTE: `encrypt_fields = {"secret_key"}` is also defined in the schema, which 
means that the field will be stored encrypted in etcd. See [encrypted storage 
fields](../plugin-develop.md#encrypted-storage-fields).
 
diff --git a/docs/en/latest/plugins/udp-logger.md b/docs/en/latest/plugins/udp-logger.md
index 9ee36ac3c..d92100e53 100644
--- a/docs/en/latest/plugins/udp-logger.md
+++ b/docs/en/latest/plugins/udp-logger.md
@@ -42,6 +42,7 @@ This plugin also allows to push logs as a batch to your 
external UDP server. It
 | host             | string  | True     |              |              | IP 
address or the hostname of the UDP server.            |
 | port             | integer | True     |              | [0,...]      | Target 
upstream port.                                    |
 | timeout          | integer | False    | 3            | [1,...]      | 
Timeout for the upstream to send data.                   |
+| log_format       | object  | False    |              |              | Log format declared as key value pairs in JSON format. Values only support strings. [APISIX](../apisix-variable.md) or [Nginx](http://nginx.org/en/docs/varindex.html) variables can be used by prefixing the string with `$`. |
 | name             | string  | False    | "udp logger" |              | Unique 
identifier for the batch processor.               |
 | include_req_body | boolean | False    | false        |              | When 
set to `true` includes the request body in the log. |
 
diff --git a/docs/zh/latest/plugins/clickhouse-logger.md b/docs/zh/latest/plugins/clickhouse-logger.md
index aed09ca71..8957661f6 100644
--- a/docs/zh/latest/plugins/clickhouse-logger.md
+++ b/docs/zh/latest/plugins/clickhouse-logger.md
@@ -44,6 +44,7 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 clickhouse-lo
 | timeout          | integer | 否     | 3                   | [1,...]      | 
发送请求后保持连接活动的时间。                             |
 | name             | string  | 否     | "clickhouse logger" |              | 标识 
logger 的唯一标识符。                                |
 | ssl_verify       | boolean | 否     | true                | [true,false] | 
当设置为 `true` 时,验证证书。                                                |
+| log_format             | object  | 否   |          |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 
 注意:schema 中还定义了 `encrypt_fields = {"password"}`,这意味着该字段将会被加密存储在 etcd 中。具体参考 
[加密存储字段](../plugin-develop.md#加密存储字段)。
 
diff --git a/docs/zh/latest/plugins/elasticsearch-logger.md b/docs/zh/latest/plugins/elasticsearch-logger.md
index 0b47001ed..12f7517e7 100644
--- a/docs/zh/latest/plugins/elasticsearch-logger.md
+++ b/docs/zh/latest/plugins/elasticsearch-logger.md
@@ -43,6 +43,7 @@ description: 本文介绍了 API 网关 Apache APISIX 的 
elasticsearch-logger 
 | field         | array   | 是       |                      | Elasticsearch 
`field`配置信息。                                |
 | field.index   | string  | 是       |                      | Elasticsearch 
`[_index 
field](https://www.elastic.co/guide/en/elasticsearch/reference/current/mapping-index-field.html#mapping-index-field)`。
 |
 | field.type    | string  | 否       | Elasticsearch 默认值 | Elasticsearch 
`[_type 
field](https://www.elastic.co/guide/en/elasticsearch/reference/7.17/mapping-type-field.html#mapping-type-field)`
 |
+| log_format    | object  | 否   |          | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | auth          | array   | 否       |                      | Elasticsearch 
`[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)`
 配置信息 |
 | auth.username | string  | 是       |                      | Elasticsearch 
`[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)`
 用户名。 |
 | auth.password | string  | 是       |                      | Elasticsearch 
`[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)`
 密码。 |
diff --git a/docs/zh/latest/plugins/file-logger.md b/docs/zh/latest/plugins/file-logger.md
index 2677ee6a0..df591df4e 100644
--- a/docs/zh/latest/plugins/file-logger.md
+++ b/docs/zh/latest/plugins/file-logger.md
@@ -48,6 +48,7 @@ description: API 网关 Apache APISIX file-logger 插件可用于将日志数据
 | 名称             | 类型     | 必选项 | 描述                                           
  |
 | ---------------- | ------- | ------ | 
------------------------------------------------ |
 | path             | string  | 是     | 自定义输出文件路径。例如:`logs/file.log`。        |
+| log_format       | object  | 否     | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | include_resp_body      | boolean | 否     | 当设置为 `true` 时,生成的文件包含响应体。         
                                                                                
      |
 | include_resp_body_expr | array   | 否     | 当 `include_resp_body` 属性设置为 
`true` 时,使用该属性并基于 [lua-resty-expr](https://github.com/api7/lua-resty-expr) 
进行过滤。 如果存在,则仅在表达式计算结果为 `true` 时记录响应。       |
 
diff --git a/docs/zh/latest/plugins/google-cloud-logging.md b/docs/zh/latest/plugins/google-cloud-logging.md
index 96f885c7e..2fc51a026 100644
--- a/docs/zh/latest/plugins/google-cloud-logging.md
+++ b/docs/zh/latest/plugins/google-cloud-logging.md
@@ -4,7 +4,7 @@ keywords:
   - APISIX
   - API 网关
   - 插件
-  - Splunk
+  - Google Cloud logging
   - 日志
 description: API 网关 Apache APISIX 的 google-cloud-logging 插件可用于将请求日志转发到 Google 
Cloud Logging Service 中进行分析和存储。
 ---
@@ -46,6 +46,7 @@ description: API 网关 Apache APISIX 的 google-cloud-logging 插件可用于
 | ssl_verify              | 否       | true                                     
        | 当设置为 `true` 时,启用 `SSL` 验证。                 |
 | resource                | 否       | {"type": "global"}                       
        | 谷歌监控资源,请参考 
[MonitoredResource](https://cloud.google.com/logging/docs/reference/v2/rest/v2/MonitoredResource)。
             |
 | log_id                  | 否       | apisix.apache.org%2Flogs                 
        | 谷歌日志 ID,请参考 
[LogEntry](https://cloud.google.com/logging/docs/reference/v2/rest/v2/LogEntry)。
                                |
+| log_format              | 否   |                   | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 
 注意:schema 中还定义了 `encrypt_fields = {"auth_config.private_key"}`,这意味着该字段将会被加密存储在 
etcd 中。具体参考 [加密存储字段](../plugin-develop.md#加密存储字段)。
 
diff --git a/docs/zh/latest/plugins/http-logger.md b/docs/zh/latest/plugins/http-logger.md
index 304091f0e..92f8d02ed 100644
--- a/docs/zh/latest/plugins/http-logger.md
+++ b/docs/zh/latest/plugins/http-logger.md
@@ -40,6 +40,7 @@ description: 本文介绍了 API 网关 Apache APISIX 的 http-logger 插件。
 | uri                    | string  | 是     |               |                   
   | HTTP 或 HTTPS 服务器的 URI。                   |
 | auth_header            | string  | 否     |               |                   
   | 授权 header(如果需要)。                                    |
 | timeout                | integer | 否     | 3             | [1,...]           
   | 发送请求后保持连接处于活动状态的时间。           |
+| log_format             | object  | 否     |               |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | include_req_body       | boolean | 否     | false         | [false, true]     
   | 当设置为 `true` 时,将请求体包含在日志中。如果请求体太大而无法保存在内存中,由于 NGINX 的限制,无法记录。 |
 | include_resp_body      | boolean | 否     | false         | [false, true]     
   | 当设置为 `true` 时,包含响应体。                                                       
                                        |
 | include_resp_body_expr | array   | 否     |               |                   
   | 当 `include_resp_body` 属性设置为 `true` 时,使用该属性并基于 
[lua-resty-expr](https://github.com/api7/lua-resty-expr) 进行过滤。 如果存在,则仅在表达式计算结果为 
`true` 时记录响应。       |
diff --git a/docs/zh/latest/plugins/kafka-logger.md b/docs/zh/latest/plugins/kafka-logger.md
index ea68923d7..1ef2129bd 100644
--- a/docs/zh/latest/plugins/kafka-logger.md
+++ b/docs/zh/latest/plugins/kafka-logger.md
@@ -50,6 +50,7 @@ description: API 网关 Apache APISIX 的 kafka-logger 插件用于将日志作
 | timeout                | integer | 否     | 3              | [1,...]          
     | 发送数据的超时时间。                             |
 | name                   | string  | 否     | "kafka logger" |                  
     | batch processor 的唯一标识。                     |
 | meta_format            | enum    | 否     | "default"      | 
["default","origin"] | `default`:获取请求信息以默认的 JSON 编码方式。`origin`:获取请求信息以 HTTP 
原始请求方式。更多信息,请参考 [meta_format](#meta_format-示例)。|
+| log_format             | object  | 否   | |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | include_req_body       | boolean | 否     | false          | [false, true]    
     | 当设置为 `true` 时,包含请求体。**注意**:如果请求体无法完全存放在内存中,由于 NGINX 的限制,APISIX 无法将它记录下来。|
 | include_req_body_expr  | array   | 否     |                |                  
     | 当 `include_req_body` 属性设置为 `true` 时进行过滤。只有当此处设置的表达式计算结果为 `true` 
时,才会记录请求体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
 | include_resp_body      | boolean | 否     | false          | [false, true]    
     | 当设置为 `true` 时,包含响应体。 |
diff --git a/docs/zh/latest/plugins/loggly.md b/docs/zh/latest/plugins/loggly.md
index 0fae1f304..e02f640b9 100644
--- a/docs/zh/latest/plugins/loggly.md
+++ b/docs/zh/latest/plugins/loggly.md
@@ -41,6 +41,7 @@ description: API 网关 Apache APISIX loggly 插件可用于将日志转发到 S
 | severity               | string (enum) | 否      | INFO    | Syslog 
日志事件的严重性级别。 包括:`DEBUG`、`INFO`、`NOTICE`、`WARNING`、`ERR`、`CRIT`、`ALERT` 和 
`EMEGR`。                                         |
 | severity_map           | object        | 否      | nil     | 一种将上游 HTTP 
响应代码映射到 Syslog 中的方法。 `key-value`,其中 `key` 是 HTTP 响应代码,`value`是 Syslog 
严重级别。例如`{"410": "CRIT"}`。                |
 | tags                   | array         | 否      |         | 
元数据将包含在任何事件日志中,以帮助进行分段和过滤。                                                      
                                                  |
+| log_format             | object  | 否   |          |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | include_req_body       | boolean       | 否      | false   | 当设置为 `true` 
时,包含请求体。**注意**:如果请求体无法完全存放在内存中,由于 NGINX 的限制,APISIX 无法将它记录下来。               |
 | include_resp_body      | boolean       | 否      | false   | 当设置为 `true` 
时,包含响应体。                                            |
 | include_resp_body_expr | array         | 否      |         | 当 
`include_resp_body` 属性设置为 `true` 时进行过滤响应体,并且只有当此处设置的表达式计算结果为 `true` 
时,才会记录响应体。更多信息,请参考 [lua-resty-expr](https://github.com/api7/lua-resty-expr)。 |
diff --git a/docs/zh/latest/plugins/rocketmq-logger.md b/docs/zh/latest/plugins/rocketmq-logger.md
index 5e952837d..9428e80f5 100644
--- a/docs/zh/latest/plugins/rocketmq-logger.md
+++ b/docs/zh/latest/plugins/rocketmq-logger.md
@@ -39,6 +39,7 @@ description: API 网关 Apache APISIX 的 rocketmq-logger 插件用于将日志
 | topic                  | string  | 是     |                   |               
        | 要推送的 topic 名称。                             |
 | key                    | string  | 否     |                   |               
        | 发送消息的 keys。                                 |
 | tag                    | string  | 否     |                   |               
        | 发送消息的 tags。                                 |
+| log_format             | object  | 否   |          |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | timeout                | integer | 否     | 3                 | [1,...]       
        | 发送数据的超时时间。                              |
 | use_tls                | boolean | 否     | false             |               
        | 当设置为 `true` 时,开启 TLS 加密。               |
 | access_key             | string  | 否     | ""                |               
        | ACL 认证的 Access key,空字符串表示不开启 ACL。    |
diff --git a/docs/zh/latest/plugins/skywalking-logger.md b/docs/zh/latest/plugins/skywalking-logger.md
index 347142b1e..f5ef4bfcf 100644
--- a/docs/zh/latest/plugins/skywalking-logger.md
+++ b/docs/zh/latest/plugins/skywalking-logger.md
@@ -42,6 +42,7 @@ description: 本文将介绍 API 网关 Apache APISIX 如何通过 skywalking-lo
 | endpoint_addr          | string  | 是     |                      |            
   | SkyWalking OAP 服务器的 URI。                                      |
 | service_name           | string  | 否     |"APISIX"              |            
   | SkyWalking 服务名称。                                              |
 | service_instance_name  | string  | 否     |"APISIX Instance Name"|            
   | SkyWalking 服务的实例名称。当设置为 `$hostname`会直接获取本地主机名。 |
+| log_format             | object  | 否   |          |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | timeout                | integer | 否     | 3                    | [1,...]    
   | 发送请求后保持连接活动的时间。                                       |
 | name                   | string  | 否     | "skywalking logger"  |            
   | 标识 logger 的唯一标识符。                                         |
 | include_req_body       | boolean | 否     | false                | [false, 
true] | 当设置为 `true` 时,将请求正文包含在日志中。                         |
diff --git a/docs/zh/latest/plugins/sls-logger.md b/docs/zh/latest/plugins/sls-logger.md
index 1162426f3..b4d21a463 100644
--- a/docs/zh/latest/plugins/sls-logger.md
+++ b/docs/zh/latest/plugins/sls-logger.md
@@ -37,6 +37,7 @@ title: sls-logger
 | host | 必要的 | TCP 服务的 IP 
地址或主机名,请参考:[阿里云日志服务列表](https://help.aliyun.com/document_detail/29008.html?spm=a2c4g.11186623.2.14.49301b4793uX0z#reference-wgx-pwq-zdb),建议配置
 IP 取代配置域名。|
 | port | 必要的 | 目标端口,阿里云日志服务默认端口为 10009。|
 | timeout | 可选的 | 发送数据超时间。|
+| log_format             | 可选的  | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | project | 必要的 | 日志服务 Project 名称,请提前在阿里云日志服务中创建 Project。|
 | logstore | 必须的 | 日志服务 Logstore 名称,请提前在阿里云日志服务中创建 Logstore。|
 | access_key_id | 必须的 | AccessKey ID。建议使用阿里云子账号 AK,详情请参见 
[授权](https://help.aliyun.com/document_detail/47664.html?spm=a2c4g.11186623.2.15.49301b47lfvxXP#task-xsk-ttc-ry)。|
diff --git a/docs/zh/latest/plugins/splunk-hec-logging.md b/docs/zh/latest/plugins/splunk-hec-logging.md
index 48cf62c94..905ca3394 100644
--- a/docs/zh/latest/plugins/splunk-hec-logging.md
+++ b/docs/zh/latest/plugins/splunk-hec-logging.md
@@ -44,6 +44,7 @@ description: API 网关 Apache APISIX 的 splunk-hec-logging 插件可用于将
 | endpoint.channel    | 否     |        | Splunk HEC 发送渠道标识,更多信息请参考 [About HTTP Event Collector Indexer Acknowledgment](https://docs.splunk.com/Documentation/Splunk/8.2.3/Data/AboutHECIDXAck)。 |
 | endpoint.timeout    | 否     | 10     | Splunk HEC 数据提交超时时间(以秒为单位)。 |
 | ssl_verify          | 否     | true   | 当设置为 `true` 时,启用 `SSL` 验证。 |
+| log_format          | 否     |        | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 
 本插件支持使用批处理器来聚合并批量处理条目(日志和数据)。这样可以避免该插件频繁地提交数据。默认情况下每 `5` 秒钟或队列中的数据达到 `1000` 条时,批处理器会自动提交数据,如需了解更多信息或自定义配置,请参考 [Batch-Processor](../batch-processor.md#配置)。
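
As a note on the batch-processor settings referenced above: the tests in this commit typically set `batch_max_size` to 1 and `inactive_timeout` to 1 so that every request is flushed immediately instead of waiting for the default 1000-entry / 5-second window. A hedged sketch of such a test-style conf, with values copied from the splunk-hec-logging test below (not recommended for production):

        "splunk-hec-logging": {
            "endpoint": {
                "uri": "http://127.0.0.1:1980/splunk_hec_logging",
                "token": "BD274822-96AA-4DA6-90EC-18940FB2414C"
            },
            "log_format": {
                "vip": "$remote_addr"
            },
            "batch_max_size": 1,
            "inactive_timeout": 1
        }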
 
diff --git a/docs/zh/latest/plugins/syslog.md b/docs/zh/latest/plugins/syslog.md
index 6d22d7c46..8e16fe69b 100644
--- a/docs/zh/latest/plugins/syslog.md
+++ b/docs/zh/latest/plugins/syslog.md
@@ -48,6 +48,7 @@ description: API 网关 Apache APISIX syslog 插件可用于将日志推送到 S
 | max_retry_count  | integer | 否     |              | [1, ...]      | 连接到日志服务器失败或将日志消息发送到日志服务器失败后的最大重试次数。 |
 | retry_delay      | integer | 否     |              | [0, ...]      | 重试连接到日志服务器或重试向日志服务器发送日志消息之前的时间延迟(以毫秒为单位)。 |
 | pool_size        | integer | 否     | 5            | [5, ...]      | `sock:keepalive` 使用的 Keepalive 池大小。 |
+| log_format       | object  | 否     |              |               | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | include_req_body | boolean | 否     | false        |               | 当设置为 `true` 时包括请求体。 |
 
 该插件支持使用批处理器来聚合并批量处理条目(日志/数据)。这样可以避免插件频繁地提交数据,默认情况下批处理器每 `5` 秒钟或队列中的数据达到 `1000` 条时提交数据,如需了解批处理器相关参数设置,请参考 [Batch-Processor](../batch-processor.md#配置)。
diff --git a/docs/zh/latest/plugins/tcp-logger.md b/docs/zh/latest/plugins/tcp-logger.md
index e3ba924ad..e74339d8a 100644
--- a/docs/zh/latest/plugins/tcp-logger.md
+++ b/docs/zh/latest/plugins/tcp-logger.md
@@ -40,6 +40,7 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 tcp-logger 
 | host             | string  | 是     |        |         | TCP 服务器的 IP 地址或主机名。 |
 | port             | integer | 是     |        | [0,...] | 目标端口。 |
 | timeout          | integer | 否     | 1000   | [1,...] | 发送数据超时间。 |
+| log_format       | object  | 否     |        |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | tls              | boolean | 否     | false  |         | 用于控制是否执行 SSL 验证。 |
 | tls_options      | string  | 否     |        |         | TLS 选项。 |
 | include_req_body | boolean | 否     |        |         | 当设置为 `true` 时,日志中将包含请求体。 |
diff --git a/docs/zh/latest/plugins/tencent-cloud-cls.md b/docs/zh/latest/plugins/tencent-cloud-cls.md
index a94791057..8fa3e5f7c 100644
--- a/docs/zh/latest/plugins/tencent-cloud-cls.md
+++ b/docs/zh/latest/plugins/tencent-cloud-cls.md
@@ -44,6 +44,7 @@ description: API 网关 Apache APISIX tencent-cloud-cls 插件可用于将日志
 | include_req_body  | boolean | 否     | false | [false, true]| 当设置为 `true` 时,日志中将包含请求体。 |
 | include_resp_body | boolean | 否     | false | [false, true]| 当设置为 `true` 时,日志中将包含响应体。 |
 | global_tag        | object  | 否     |       |              | kv 形式的 JSON 数据,可以写入每一条日志,便于在 CLS 中检索。 |
+| log_format        | object  | 否     |       |              | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 
 注意:schema 中还定义了 `encrypt_fields = {"secret_key"}`,这意味着该字段将会被加密存储在 etcd 中。具体参考 [加密存储字段](../plugin-develop.md#加密存储字段)。
 
diff --git a/docs/zh/latest/plugins/udp-logger.md b/docs/zh/latest/plugins/udp-logger.md
index 797967ed5..d9fd580c2 100644
--- a/docs/zh/latest/plugins/udp-logger.md
+++ b/docs/zh/latest/plugins/udp-logger.md
@@ -40,6 +40,7 @@ description: 本文介绍了 API 网关 Apache APISIX 如何使用 udp-logger 
 | host             | string  | 是     |              |         | UDP 服务的 IP 地址或主机名。 |
 | port             | integer | 是     |              | [0,...] | 目标端口。 |
 | timeout          | integer | 否     | 1000         | [1,...] | 发送数据超时间。 |
+| log_format       | object  | 否     |              |         | 以 JSON 格式的键值对来声明日志格式。对于值部分,仅支持字符串。如果是以 `$` 开头,则表明是要获取 [APISIX 变量](../apisix-variable.md) 或 [NGINX 内置变量](http://nginx.org/en/docs/varindex.html)。 |
 | name             | string  | 否     | "udp logger" |         | 用于识别批处理器。 |
 | include_req_body | boolean | 否     |              |         | 当设置为 `true` 时,日志中将包含请求体。 |
 
diff --git a/t/plugin/clickhouse-logger.t b/t/plugin/clickhouse-logger.t
index ed2f0dc29..3aa28190a 100644
--- a/t/plugin/clickhouse-logger.t
+++ b/t/plugin/clickhouse-logger.t
@@ -225,3 +225,59 @@ clickhouse headers: x-clickhouse-key:a
 clickhouse headers: x-clickhouse-user:default
 clickhouse headers: x-clickhouse-database:default
 --- wait: 5
+
+
+
+=== TEST 7: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "clickhouse-logger": {
+                                "user": "default",
+                                "password": "a",
+                                "database": "default",
+                                "logtable": "t",
+                                "endpoint_addrs": ["http://127.0.0.1:10420/clickhouse-logger/test1"],
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "batch_max_size":1,
+                                "inactive_timeout":1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 8: hit route and report logger
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 1.5
+--- error_log eval
+qr/clickhouse body: INSERT INTO t FORMAT JSONEachRow \{.*"vip":"127.0.0.1".*\}/
diff --git a/t/plugin/elasticsearch-logger.t b/t/plugin/elasticsearch-logger.t
index 7da890797..ba9b6bfd9 100644
--- a/t/plugin/elasticsearch-logger.t
+++ b/t/plugin/elasticsearch-logger.t
@@ -582,3 +582,89 @@ passed
 --- error_log
 http://127.0.0.1:9200/_bulk
 http://127.0.0.1:9201/_bulk
+
+
+
+=== TEST 15: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+                uri = "/hello",
+                upstream = {
+                    type = "roundrobin",
+                    nodes = {
+                        ["127.0.0.1:1980"] = 1
+                    }
+                },
+                plugins = {
+                    ["elasticsearch-logger"] = {
+                        endpoint_addr = "http://127.0.0.1:9201",
+                        field = {
+                            index = "services"
+                        },
+                        log_format = {
+                            custom_host = "$host"
+                        },
+                        batch_max_size = 1,
+                        inactive_timeout = 1
+                    }
+                }
+            })
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 16: hit route and check custom elasticsearch logger
+--- extra_init_by_lua
+    local core = require("apisix.core")
+    local http = require("resty.http")
+    local ngx_re = require("ngx.re")
+    local log_util = require("apisix.utils.log-util")
+    log_util.inject_get_custom_format_log(function(ctx, format)
+        return {
+            test = "test"
+        }
+    end)
+
+    http.request_uri = function(self, uri, params)
+        if not params.body or type(params.body) ~= "string" then
+            return nil, "invalid params body"
+        end
+
+        local arr = ngx_re.split(params.body, "\n")
+        if not arr or #arr ~= 2 then
+            return nil, "invalid params body"
+        end
+
+        local entry = core.json.decode(arr[2])
+        local origin_entry = log_util.get_custom_format_log(nil, nil)
+        for k, v in pairs(origin_entry) do
+            local vv = entry[k]
+            if not vv or vv ~= v then
+                return nil, "invalid params body"
+            end
+        end
+
+        core.log.error("check elasticsearch custom body success")
+        return {
+            status = 200,
+            body = "success"
+        }, nil
+    end
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 2
+--- error_log
+check elasticsearch custom body success
diff --git a/t/plugin/file-logger.t b/t/plugin/file-logger.t
index 1d16befb3..30b9fc18a 100644
--- a/t/plugin/file-logger.t
+++ b/t/plugin/file-logger.t
@@ -207,3 +207,134 @@ write file log success
     }
 --- error_log
 failed to open file: /log/file.log, error info: /log/file.log: No such file or directory
+
+
+
+=== TEST 6: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            -- ensure the format is not set
+            t('/apisix/admin/plugin_metadata/file-logger',
+                ngx.HTTP_DELETE
+            )
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "file-logger": {
+                                "path": "file.log",
+                                "log_format": {
+                                    "host": "$host",
+                                    "client_ip": "$remote_addr"
+                                }
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1982": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 7: verify plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local core = require("apisix.core")
+            local t = require("lib.test_admin").test
+            local code = t("/hello", ngx.HTTP_GET)
+            local fd, err = io.open("file.log", 'r')
+            local msg
+
+            if not fd then
+                core.log.error("failed to open file: file.log, error info: ", err)
+                return
+            end
+
+            msg = fd:read()
+
+            local new_msg = core.json.decode(msg)
+            if new_msg.client_ip == '127.0.0.1' and new_msg.route_id == '1'
+                and new_msg.host == '127.0.0.1'
+            then
+                msg = "write file log success"
+                ngx.status = code
+                ngx.say(msg)
+            end
+        }
+    }
+--- response_body
+write file log success
+
+
+
+=== TEST 8: add plugin metadata
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/plugin_metadata/file-logger',
+                ngx.HTTP_PUT,
+                [[{
+                    "log_format": {
+                        "host": "$host"
+                    }
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 9: ensure config in plugin is prior to the one in plugin metadata
+--- config
+    location /t {
+        content_by_lua_block {
+            local core = require("apisix.core")
+            local t = require("lib.test_admin").test
+            local code = t("/hello", ngx.HTTP_GET)
+            local fd, err = io.open("file.log", 'r')
+            local msg
+
+            if not fd then
+                core.log.error("failed to open file: file.log, error info: ", err)
+                return
+            end
+
+            msg = fd:read()
+
+            local new_msg = core.json.decode(msg)
+            if new_msg.client_ip == '127.0.0.1' and new_msg.route_id == '1'
+                and new_msg.host == '127.0.0.1'
+            then
+                msg = "write file log success"
+                ngx.status = code
+                ngx.say(msg)
+            end
+        }
+    }
+--- response_body
+write file log success
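
The file-logger tests above pin down the precedence rule for the new field: TEST 8 writes a `log_format` into the plugin metadata, and TEST 9 then checks that the route still logs with the format from its own plugin conf, i.e. the per-plugin `log_format` takes priority over plugin metadata. With the route from TEST 6, a request to `/hello` is expected to produce a JSON line roughly like the sketch below (field order is not guaranteed, and `route_id` is appended by the logger even though it is not part of the custom format, as the assertions show):

        {"host":"127.0.0.1","client_ip":"127.0.0.1","route_id":"1"}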
diff --git a/t/plugin/google-cloud-logging2.t b/t/plugin/google-cloud-logging2.t
index 0b8be32df..4290d3fc8 100644
--- a/t/plugin/google-cloud-logging2.t
+++ b/t/plugin/google-cloud-logging2.t
@@ -332,3 +332,107 @@ the mock backend is hit
 --- error_code: 400
 --- response_body
 {"error_msg":"invalid configuration: property \"log_format\" validation 
failed: wrong type: expected object, got string"}
+
+
+
+=== TEST 6: set route to test custom log format in route
+--- config
+    location /t {
+        content_by_lua_block {
+            local config = {
+                uri = "/hello",
+                upstream = {
+                    type = "roundrobin",
+                    nodes = {
+                        ["127.0.0.1:1980"] = 1
+                    }
+                },
+                plugins = {
+                    ["google-cloud-logging"] = {
+                        auth_config = {
+                            private_key = [[
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDDzrFwnA3EvYyR
+aeMgaLD3hBjvxKrz10uox1X8q7YYhf2ViRtLRUMa2bEMYksE5hbhwpNf6mKAnLOC
+UuAT6cPPdUl/agKpJXviBPIR2LuzD17WsLJHp1HxUDssSkgfCaGcOGGNfLUhhIpF
+2JUctLmxiZoAZySlSjcwupSuDJ0aPm0XO8r9H8Qu5kF2Vkz5e5bFivLTmvzrQTe4
+v5V1UI6hThElCSeUmdNF3uG3wopxlvq4zXgLTnuLbrNf/Gc4mlpV+UDgTISj32Ep
+AB2vxKEbvQw4ti8YJnGXWjxLerhfrszFw+V8lpeduiDYA44ZFoVqvzxeIsVZNtcw
+Iu7PvEPNAgMBAAECggEAVpyN9m7A1F631/aLheFpLgMbeKt4puV7zQtnaJ2XrZ9P
+PR7pmNDpTu4uF3k/D8qrIm+L+uhVa+hkquf3wDct6w1JVnfQ93riImbnoKdK13ic
+DcEZCwLjByfjFMNCxZ/gAZca55fbExlqhFy6EHmMjhB8s2LsXcTHRuGxNI/Vyi49
+sxECibe0U53aqdJbVWrphIS67cpwl4TUkN6mrHsNuDYNJ9dgkpapoqp4FTFQsBqC
+afOK5qgJ68dWZ47FBUng+AZjdCncqAIuJxxItGVQP6YPsFs+OXcivIVHJr363TpC
+l85FfdvqWV5OGBbwSKhNwiTNUVvfSQVmtURGWG/HbQKBgQD4gZ1z9+Lx19kT9WTz
+lw93lxso++uhAPDTKviyWSRoEe5aN3LCd4My+/Aj+sk4ON/s2BV3ska5Im93j+vC
+rCv3uPn1n2jUhWuJ3bDqipeTW4n/CQA2m/8vd26TMk22yOkkqw2MIA8sjJ//SD7g
+tdG7up6DgGMP4hgbO89uGU7DAwKBgQDJtkKd0grh3u52Foeh9YaiAgYRwc65IE16
+UyD1OJxIuX/dYQDLlo5KyyngFa1ZhWIs7qC7r3xXH+10kfJY+Q+5YMjmZjlL8SR1
+Ujqd02R9F2//6OeswyReachJZbZdtiEw3lPa4jVFYfhSe0M2ZPxMwvoXb25eyCNI
+1lYjSKq87wKBgHnLTNghjeDp4UKe6rNYPgRm0rDrhziJtX5JeUov1mALKb6dnmkh
+GfRK9g8sQqKDfXwfC6Z2gaMK9YaryujGaWYoCpoPXtmJ6oLPXH4XHuLh4mhUiP46
+xn8FEfSimuQS4/FMxH8A128GHQSI7AhGFFzlwfrBWcvXC+mNDsTvMmLxAoGARc+4
+upppfccETQZ7JsitMgD1TMwA2f2eEwoWTAitvlXFNT9PYSbYVHaAJbga6PLLCbYF
+FzAjHpxEOKYSdEyu7n/ayDL0/Z2V+qzc8KarDsg/0RgwppBbU/nUgeKb/U79qcYo
+y4ai3UKNCS70Ei1dTMvmdpnwXwlxfNIBufB6dy0CgYBMYq9Lc31GkC6PcGEEbx6W
+vjImOadWZbuOVnvEQjb5XCdcOsWsMcg96PtoeuyyHmhnEF1GsMzcIdQv/PHrvYpK
+Yp8D0aqsLEgwGrJQER26FPpKmyIwvcL+nm6q5W31PnU9AOC/WEkB6Zs58hsMzD2S
+kEJQcmfVew5mFXyxuEn3zA==
+-----END PRIVATE KEY-----]],
+                            project_id = "apisix",
+                            token_uri = "http://127.0.0.1:1980/google/logging/token",
+                            scopes = {
+                                "https://apisix.apache.org/logs:admin"
+                            },
+                            entries_uri = "http://127.0.0.1:1980/google/logging/entries",
+                        },
+                        log_format = {
+                            host = "$host",
+                            ["@timestamp"] = "$time_iso8601",
+                            vip = "$remote_addr"
+                        },
+                        inactive_timeout = 1,
+                        batch_max_size = 1,
+                    }
+                }
+            }
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, config)
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 7: hit
+--- extra_init_by_lua
+    local decode = require("toolkit.json").decode
+    local up = require("lib.server")
+    up.google_logging_entries = function()
+        ngx.log(ngx.WARN, "the mock backend is hit")
+
+        ngx.req.read_body()
+        local data = ngx.req.get_body_data()
+        data = decode(data)
+        assert(data.entries[1].jsonPayload.vip == "127.0.0.1")
+        assert(data.entries[1].resource.type == "global")
+        ngx.say('{}')
+    end
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world
+--- error_log
+the mock backend is hit
+--- no_error_log
+[error]
diff --git a/t/plugin/http-logger-log-format.t b/t/plugin/http-logger-log-format.t
index 1ce670add..d2c6daa9e 100644
--- a/t/plugin/http-logger-log-format.t
+++ b/t/plugin/http-logger-log-format.t
@@ -502,3 +502,59 @@ hello world
 --- wait: 0.5
 --- error_log eval
 qr/request log: \{"client_ip":"127.0.0.1","host":"localhost","labels":\{"k":"v"\},"route_id":"1"\}/
+
+
+
+=== TEST 17: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "http-logger": {
+                                "uri": "http://127.0.0.1:1980/log",
+                                "batch_max_size": 1,
+                                "max_retry_count": 1,
+                                "retry_delay": 2,
+                                "buffer_duration": 2,
+                                "inactive_timeout": 2,
+                                "concat_method": "new_line",
+                                "log_format": {
+                                    "x_ip": "$remote_addr"
+                                }
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1982": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 18: hit route and report http logger
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 0.5
+--- error_log eval
+qr/request log: \{.*"x_ip":"127.0.0.1".*\}/
diff --git a/t/plugin/kafka-logger-log-format.t b/t/plugin/kafka-logger-log-format.t
index 354ae1803..95321826c 100644
--- a/t/plugin/kafka-logger-log-format.t
+++ b/t/plugin/kafka-logger-log-format.t
@@ -105,3 +105,59 @@ hello world
 --- wait: 0.5
 --- error_log eval
 qr/send data to kafka: \{.*"host":"localhost"/
+
+
+
+=== TEST 4: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "kafka-logger": {
+                                "broker_list" : {
+                                    "127.0.0.1":9092
+                                },
+                                "log_format": {
+                                    "x_ip": "$remote_addr"
+                                },
+                                "kafka_topic" : "test2",
+                                "key" : "key1",
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 5: hit route and report kafka logger
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 0.5
+--- error_log eval
+qr/send data to kafka: \{.*"x_ip":"127.0.0.1".*\}/
diff --git a/t/plugin/loggly.t b/t/plugin/loggly.t
index 7a26c8e79..f8d2f021d 100644
--- a/t/plugin/loggly.t
+++ b/t/plugin/loggly.t
@@ -669,3 +669,73 @@ service temporarily unavailable
 qr/message received: [ -~]+/
 --- grep_error_log_out eval
 qr/message received: <11>1 [\d\-T:.]+Z [\d.]+ apisix [\d]+ - \[tok\@41058 
tag="apisix"] \{"route_id":"1"\}/
+
+
+
+=== TEST 16: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/plugin_metadata/loggly',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "host":"127.0.0.1",
+                        "port": 8126,
+                        "log_format":{
+                            "client":"$remote_addr"
+                        }
+                }]]
+            )
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say("fail")
+                return
+            end
+
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "loggly": {
+                                "customer_token" : "tok",
+                                "log_format":{
+                                    "host":"$host",
+                                    "client":"$remote_addr"
+                                },
+                                "batch_max_size": 1,
+                                "inactive_timeout": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1982": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/opentracing"
+                }]]
+            )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 17: hit
+--- request
+GET /opentracing?foo=bar
+--- response_body
+opentracing
+--- wait: 0.5
+--- grep_error_log eval
+qr/message received: [ -~]+/
+--- grep_error_log_out eval
+qr/message received: <14>1 [\d\-T:.]+Z \w+ apisix [\d]+ - \[tok\@41058 
tag="apisix"] \{"client":"[\d.]+","host":"\w+","route_id":"1"\}/
diff --git a/t/plugin/rocketmq-logger-log-format.t b/t/plugin/rocketmq-logger-log-format.t
index 5ddbd5446..e7512dbc3 100644
--- a/t/plugin/rocketmq-logger-log-format.t
+++ b/t/plugin/rocketmq-logger-log-format.t
@@ -103,3 +103,58 @@ hello world
 --- wait: 0.5
 --- error_log eval
 qr/send data to rocketmq: \{.*"host":"localhost"/
+
+
+
+=== TEST 4: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "rocketmq-logger": {
+                                "nameserver_list" : [ "127.0.0.1:9876" ],
+                                "topic" : "test2",
+                                "key" : "key1",
+                                "tag" : "tag1",
+                                "log_format": {
+                                    "x_ip": "$remote_addr"
+                                },
+                                "timeout" : 1,
+                                "batch_max_size": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 5: hit route and report logger
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 0.5
+--- error_log eval
+qr/send data to rocketmq: \{.*"x_ip":"127.0.0.1".*\}/
diff --git a/t/plugin/skywalking-logger.t b/t/plugin/skywalking-logger.t
index 0f3f07848..8d9bdeda7 100644
--- a/t/plugin/skywalking-logger.t
+++ b/t/plugin/skywalking-logger.t
@@ -228,3 +228,56 @@ opentracing
 --- error_log eval
 qr/.*\{\\\"json\\\":\\\"\{(\\\\\\\"\@timestamp\\\\\\\":\\\\\\\".*\\\\\\\"|\\\\\\\"client_ip\\\\\\\":\\\\\\\"127\.0\.0\.1\\\\\\\"|\\\\\\\"host\\\\\\\":\\\\\\\"localhost\\\\\\\"|\\\\\\\"route_id\\\\\\\":\\\\\\\"1\\\\\\\"|,){7}\}/
 --- wait: 0.5
+
+
+
+=== TEST 10: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "skywalking-logger": {
+                                "endpoint_addr": "http://127.0.0.1:1986",
+                                "log_format": {
+                                    "my_ip": "$remote_addr"
+                                },
+                                "batch_max_size": 1,
+                                "max_retry_count": 1,
+                                "retry_delay": 2,
+                                "buffer_duration": 2,
+                                "inactive_timeout": 2
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1982": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/opentracing"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 11: access local server and test log format
+--- request
+GET /opentracing
+--- response_body
+opentracing
+--- error_log eval
+qr/.*\{\\\"json\\\":.*\\\\\\"my_ip\\\\\\":\\\\\\"127\.0\.0\.1\\\\\\".*\}/
+--- wait: 0.5
diff --git a/t/plugin/sls-logger.t b/t/plugin/sls-logger.t
index fa3e9c43c..a56d6121f 100644
--- a/t/plugin/sls-logger.t
+++ b/t/plugin/sls-logger.t
@@ -323,3 +323,65 @@ apisix:
 --- response_body
 your_access_key_secret
 1T6nR0fz4yhz/zTuRTvt7Xu3c9ASelDXG2//e/A5OiA=
+
+
+
+=== TEST 12: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "sls-logger": {
+                                "host": "100.100.99.135",
+                                "port": 10009,
+                                "project": "your_project",
+                                "logstore": "your_logstore",
+                                "access_key_id": "your_access_key_id",
+                                "access_key_secret": "your_access_key_secret",
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "timeout": 30000
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+            end
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 13: access
+--- extra_init_by_lua
+    local json = require("toolkit.json")
+    local rfc5424 = require("apisix.plugins.slslog.rfc5424")
+    local old_f = rfc5424.encode
+    rfc5424.encode = function(facility, severity, hostname, appname, pid, project,
+                   logstore, access_key_id, access_key_secret, msg)
+        local r = json.decode(msg)
+        assert(r.vip == "127.0.0.1", r.vip)
+        return old_f(facility, severity, hostname, appname, pid, project,
+                     logstore, access_key_id, access_key_secret, msg)
+    end
+--- request
+GET /hello
+--- response_body
+hello world
diff --git a/t/plugin/splunk-hec-logging.t b/t/plugin/splunk-hec-logging.t
index afd462a37..78d6e9689 100644
--- a/t/plugin/splunk-hec-logging.t
+++ b/t/plugin/splunk-hec-logging.t
@@ -301,3 +301,77 @@ hello world
 the mock backend is hit
 --- no_error_log
 [error]
+
+
+
+=== TEST 9: set route to test custom log format in route
+--- config
+    location /t {
+        content_by_lua_block {
+            local config = {
+                uri = "/hello",
+                upstream = {
+                    type = "roundrobin",
+                    nodes = {
+                        ["127.0.0.1:1980"] = 1
+                    }
+                },
+                plugins = {
+                    ["splunk-hec-logging"] = {
+                        endpoint = {
+                            uri = "http://127.0.0.1:1980/splunk_hec_logging",
+                            token = "BD274822-96AA-4DA6-90EC-18940FB2414C"
+                        },
+                        log_format = {
+                            host = "$host",
+                            ["@timestamp"] = "$time_iso8601",
+                            vip = "$remote_addr"
+                        },
+                        batch_max_size = 1,
+                        inactive_timeout = 1
+                    }
+                }
+            }
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, config)
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 10: hit
+--- extra_init_by_lua
+    local core = require("apisix.core")
+    local decode = require("toolkit.json").decode
+    local up = require("lib.server")
+    up.splunk_hec_logging = function()
+        ngx.log(ngx.WARN, "the mock backend is hit")
+
+        ngx.req.read_body()
+        local data = ngx.req.get_body_data()
+        ngx.log(ngx.WARN, data)
+        data = decode(data)
+        assert(data[1].event.vip == "127.0.0.1")
+        assert(data[1].source == "apache-apisix-splunk-hec-logging")
+        assert(data[1].host == core.utils.gethostname())
+        ngx.say('{}')
+    end
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world
+--- error_log
+the mock backend is hit
+--- no_error_log
+[error]
diff --git a/t/plugin/syslog.t b/t/plugin/syslog.t
index 07ed7f115..a8795bc24 100644
--- a/t/plugin/syslog.t
+++ b/t/plugin/syslog.t
@@ -413,3 +413,71 @@ hello world
 [error]
 --- error_log eval
 qr/syslog-log-format.*\{.*"upstream":"127.0.0.1:\d+"/
+
+
+
+=== TEST 12: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "syslog": {
+                                "batch_max_size": 1,
+                                "flush_limit": 1,
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "host" : "127.0.0.1",
+                                "port" : 5050
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 13: access
+--- extra_init_by_lua
+    local syslog = require("apisix.plugins.syslog.init")
+    local json = require("apisix.core.json")
+    local log = require("apisix.core.log")
+    local old_f = syslog.push_entry
+    syslog.push_entry = function(conf, ctx, entry)
+        assert(entry.vip == "127.0.0.1")
+        log.info("push_entry is called with data: ", json.encode(entry))
+        return old_f(conf, ctx, entry)
+    end
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 0.5
+--- no_error_log
+[error]
+--- error_log
+push_entry is called with data
diff --git a/t/plugin/tcp-logger.t b/t/plugin/tcp-logger.t
index f4ee66823..0d15b5692 100644
--- a/t/plugin/tcp-logger.t
+++ b/t/plugin/tcp-logger.t
@@ -396,3 +396,89 @@ hello world
 the mock backend is hit
 --- no_error_log
 [error]
+
+
+
+=== TEST 11: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "tcp-logger": {
+                                "host": "127.0.0.1",
+                                "port": 8125,
+                                "tls": false,
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "batch_max_size": 1,
+                                "inactive_timeout": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 12: access
+--- stream_conf_enable
+--- extra_stream_config
+    server {
+        listen 8125;
+        content_by_lua_block {
+            local decode = require("toolkit.json").decode
+            ngx.log(ngx.WARN, "the mock backend is hit")
+
+            local sock, err = ngx.req.socket(true)
+            if not sock then
+                ngx.log(ngx.ERR, "failed to get the request socket: ", err)
+                return
+            end
+
+            local data, err = sock:receive('*a')
+
+            if not data then
+                if err and err ~= "closed" then
+                    ngx.log(ngx.ERR, "socket error, returning: ", err)
+                end
+                return
+            end
+
+            data = decode(data)
+            assert(data.vip == "127.0.0.1")
+        }
+    }
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 2
+--- error_log
+the mock backend is hit
+--- no_error_log
+[error]
diff --git a/t/plugin/tencent-cloud-cls.t b/t/plugin/tencent-cloud-cls.t
index b16e5c124..fff3bc129 100644
--- a/t/plugin/tencent-cloud-cls.t
+++ b/t/plugin/tencent-cloud-cls.t
@@ -409,3 +409,94 @@ apisix:
 --- response_body
 secret_key
 oshn8tcqE8cJArmEILVNPQ==
+
+
+
+=== TEST 13: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local plugin = require("apisix.plugins.tencent-cloud-cls")
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "tencent-cloud-cls": {
+                                "cls_host": "127.0.0.1:10421",
+                                "cls_topic": "143b5d70-139b-4aec-b54e-bb97756916de",
+                                "secret_id": "secret_id",
+                                "secret_key": "secret_key",
+                                "batch_max_size": 1,
+                                "max_retry_count": 1,
+                                "inactive_timeout": 1,
+                                "log_format": {
+                                    "host": "$host",
+                                    "@timestamp": "$time_iso8601",
+                                    "vip": "$remote_addr"
+                                }
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1982": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/opentracing"
+                }]]
+                )
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- response_body
+passed
+
+
+
+=== TEST 14: log use log_format
+--- extra_init_by_lua
+    local cls = require("apisix.plugins.tencent-cloud-cls.cls-sdk")
+    cls.send_cls_request = function(self, pb_obj)
+        if (#pb_obj.logGroupList ~= 1) then
+            ngx.log(ngx.ERR, "unexpected logGroupList length: ", #pb_obj.logGroupList)
+            return false
+        end
+        local log_group = pb_obj.logGroupList[1]
+        if #log_group.logs ~= 1 then
+            ngx.log(ngx.ERR, "unexpected logs length: ", #log_group.logs)
+            return false
+        end
+        local log = log_group.logs[1]
+        if #log.contents == 0 then
+            ngx.log(ngx.ERR, "unexpected contents length: ", #log.contents)
+            return false
+        end
+        local has_host, has_timestamp, has_vip = false, false, false
+        for i, tag in ipairs(log.contents) do
+            if tag.key == "host" then
+                has_host = true
+            end
+            if tag.key == "@timestamp" then
+                has_timestamp = true
+            end
+            if tag.key == "vip" then
+                has_vip = true
+            end
+        end
+        if not(has_host and has_timestamp and has_vip) then
+            return false
+        end
+        return true
+    end
+--- request
+GET /opentracing
+--- response_body
+opentracing
+--- wait: 0.5
diff --git a/t/plugin/udp-logger.t b/t/plugin/udp-logger.t
index 7f660bb6c..288c3e512 100644
--- a/t/plugin/udp-logger.t
+++ b/t/plugin/udp-logger.t
@@ -393,3 +393,89 @@ hello world
 the mock backend is hit
 --- no_error_log
 [error]
+
+
+
+=== TEST 11: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "udp-logger": {
+                                "host": "127.0.0.1",
+                                "port": 8125,
+                                "tls": false,
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "batch_max_size": 1,
+                                "inactive_timeout": 1
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1980": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 12: access
+--- stream_conf_enable
+--- extra_stream_config
+    server {
+        listen 8125 udp;
+        content_by_lua_block {
+            local decode = require("toolkit.json").decode
+            ngx.log(ngx.WARN, "the mock backend is hit")
+
+            local sock, err = ngx.req.socket(true)
+            if not sock then
+                ngx.log(ngx.ERR, "failed to get the request socket: ", err)
+                return
+            end
+
+            local data, err = sock:receive()
+
+            if not data then
+                if err and err ~= "no more data" then
+                    ngx.log(ngx.ERR, "socket error, returning: ", err)
+                end
+                return
+            end
+
+            data = decode(data)
+            assert(data.vip == "127.0.0.1")
+        }
+    }
+--- request
+GET /hello
+--- response_body
+hello world
+--- wait: 2
+--- error_log
+the mock backend is hit
+--- no_error_log
+[error]
diff --git a/t/stream-plugin/syslog.t b/t/stream-plugin/syslog.t
index d185f68bf..0485b0811 100644
--- a/t/stream-plugin/syslog.t
+++ b/t/stream-plugin/syslog.t
@@ -346,3 +346,71 @@ qr/sending a batch logs to 127.0.0.1:(\d+)/
 --- grep_error_log_out
 sending a batch logs to 127.0.0.1:5044
 sending a batch logs to 127.0.0.1:5045
+
+
+
+=== TEST 8: log format in plugin
+--- config
+    location /t {
+        content_by_lua_block {
+            local t = require("lib.test_admin").test
+            local code, body = t('/apisix/admin/stream_routes/1',
+                 ngx.HTTP_PUT,
+                 [[{
+                        "plugins": {
+                            "syslog": {
+                                "batch_max_size": 1,
+                                "flush_limit": 1,
+                                "log_format": {
+                                    "vip": "$remote_addr"
+                                },
+                                "host" : "127.0.0.1",
+                                "port" : 5050
+                            }
+                        },
+                        "upstream": {
+                            "nodes": {
+                                "127.0.0.1:1995": 1
+                            },
+                            "type": "roundrobin"
+                        },
+                        "uri": "/hello"
+                }]]
+                )
+
+            if code >= 300 then
+                ngx.status = code
+                ngx.say(body)
+                return
+            end
+
+            ngx.say(body)
+        }
+    }
+--- request
+GET /t
+--- response_body
+passed
+
+
+
+=== TEST 9: access
+--- stream_extra_init_by_lua
+    local syslog = require("apisix.plugins.syslog.init")
+    local json = require("apisix.core.json")
+    local log = require("apisix.core.log")
+    local old_f = syslog.push_entry
+    syslog.push_entry = function(conf, ctx, entry)
+        assert(entry.vip == "127.0.0.1")
+        log.info("push_entry is called with data: ", json.encode(entry))
+        return old_f(conf, ctx, entry)
+    end
+--- stream_request
+mmm
+--- stream_response
+hello world
+--- wait: 0.5
+--- no_error_log
+[error]
+--- error_log
+push_entry is called with data
