This is an automated email from the ASF dual-hosted git repository.
shreemaanabhishek pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git
The following commit(s) were added to refs/heads/master by this push:
new 0c6ca07ca feat: support authentication via headers (#12994)
0c6ca07ca is described below
commit 0c6ca07cab316d1496b3b51cfe36fe198afb2693
Author: Shreemaan Abhishek <[email protected]>
AuthorDate: Wed Feb 11 20:20:45 2026 +0545
feat: support authentication via headers (#12994)
---
apisix/plugins/elasticsearch-logger.lua | 32 +++++++++++++++--
docs/en/latest/plugins/elasticsearch-logger.md | 1 +
docs/zh/latest/plugins/elasticsearch-logger.md | 1 +
t/plugin/elasticsearch-logger2.t | 50 ++++++++++++++++++++++++++
4 files changed, 82 insertions(+), 2 deletions(-)
diff --git a/apisix/plugins/elasticsearch-logger.lua
b/apisix/plugins/elasticsearch-logger.lua
index 3d9da16bb..a3114e98e 100644
--- a/apisix/plugins/elasticsearch-logger.lua
+++ b/apisix/plugins/elasticsearch-logger.lua
@@ -23,6 +23,7 @@ local plugin = require("apisix.plugin")
local ngx = ngx
local str_format = core.string.format
local math_random = math.random
+local pairs = pairs
local plugin_name = "elasticsearch-logger"
local batch_processor_manager = bp_manager_mod.new(plugin_name)
@@ -61,9 +62,22 @@ local schema = {
password = {
type = "string",
minLength = 1
- },
+ }
+ },
+ oneOf = {
+ {required = {"username", "password"}},
+ }
+ },
+ headers = {
+ type = "object",
+ minProperties = 1,
+ patternProperties = {
+ ["^[^:]+$"] = {
+ type = "string",
+ minLength = 1
+ }
},
- required = {"username", "password"},
+ additionalProperties = false
},
timeout = {
type = "integer",
@@ -140,6 +154,7 @@ local function get_es_major_version(uri, conf)
if not httpc then
return nil, "failed to create http client"
end
+
local headers = {}
if conf.auth then
local authorization = "Basic " .. ngx.encode_base64(
@@ -147,6 +162,13 @@ local function get_es_major_version(uri, conf)
)
headers["Authorization"] = authorization
end
+
+ if conf.headers then
+ for k, v in pairs(conf.headers) do
+ headers[k] = v
+ end
+ end
+
httpc:set_timeout(conf.timeout * 1000)
local res, err = httpc:request_uri(uri, {
ssl_verify = conf.ssl_verify,
@@ -236,6 +258,12 @@ local function send_to_elasticsearch(conf, entries)
headers["Authorization"] = authorization
end
+ if conf.headers then
+ for k, v in pairs(conf.headers) do
+ headers[k] = v
+ end
+ end
+
core.log.info("uri: ", uri, ", body: ", body)
httpc:set_timeout(conf.timeout * 1000)
diff --git a/docs/en/latest/plugins/elasticsearch-logger.md
b/docs/en/latest/plugins/elasticsearch-logger.md
index acb46001a..ff7537783 100644
--- a/docs/en/latest/plugins/elasticsearch-logger.md
+++ b/docs/en/latest/plugins/elasticsearch-logger.md
@@ -46,6 +46,7 @@ The `elasticsearch-logger` Plugin pushes request and response
logs in batches to
| auth | object | False | |
Elasticsearch
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
configuration. |
| auth.username | string | True | |
Elasticsearch
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
username. |
| auth.password | string | True | |
Elasticsearch
[authentication](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
password. |
+| headers | object | False | | Custom headers
to send with requests as key-value pairs. For example: `{"Authorization":
"Bearer token", "X-API-Key": "key"}`. |
| ssl_verify | boolean | False | true | If true,
perform SSL verification. |
| timeout | integer | False | 10 |
Elasticsearch send data timeout in seconds. |
| include_req_body | boolean | False | false | If true,
include the request body in the log. Note that if the request body is too big
to be kept in the memory, it can not be logged due to NGINX's limitations.
|
diff --git a/docs/zh/latest/plugins/elasticsearch-logger.md
b/docs/zh/latest/plugins/elasticsearch-logger.md
index abf43df23..aad8a3ce8 100644
--- a/docs/zh/latest/plugins/elasticsearch-logger.md
+++ b/docs/zh/latest/plugins/elasticsearch-logger.md
@@ -47,6 +47,7 @@ description: elasticsearch-logger Plugin 将请求和响应日志批量推送到
| auth | object | 否 | | Elasticsearch
[身份验证](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
配置。 |
| auth.username | string | 是 | | Elasticsearch
[身份验证](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
用户名。 |
| auth.password | string | 是 | | Elasticsearch
[身份验证](https://www.elastic.co/guide/en/elasticsearch/reference/current/setting-up-authentication.html)
密码。 |
+| headers | object | 否 | | 自定义请求标头,以键值对形式配置。例如 `{"Authorization": "Bearer
token", "X-API-Key": "key"}`。 |
| ssl_verify | boolean | 否 | true | 如果为 true,则执行 SSL 验证。 |
| timeout | integer | 否 | 10 | Elasticsearch 发送数据超时(秒)。 |
| include_req_body | boolean | 否 | false |如果为
true,则将请求主体包含在日志中。请注意,如果请求主体太大而无法保存在内存中,则由于 NGINX 的限制而无法记录。|
diff --git a/t/plugin/elasticsearch-logger2.t b/t/plugin/elasticsearch-logger2.t
index 18610a3e6..93e51898f 100644
--- a/t/plugin/elasticsearch-logger2.t
+++ b/t/plugin/elasticsearch-logger2.t
@@ -119,3 +119,53 @@ location /t {
--- error_log
max pending entries limit exceeded. discarding entry
--- timeout: 5
+
+
+
+=== TEST 2: set route with header auth
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["elasticsearch-logger"] = {
+ endpoint_addr = "http://127.0.0.1:9201",
+ field = {
+ index = "services"
+ },
+ headers = {
+ Authorization = "Basic ZWxhc3RpYzoxMjM0NTY="
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 3: test route (auth success)
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world
+--- error_log
+Batch Processor[elasticsearch-logger] successfully processed the entries