This is an automated email from the ASF dual-hosted git repository.
spacewander pushed a commit to branch master
in repository https://gitbox.apache.org/repos/asf/apisix.git
The following commit(s) were added to refs/heads/master by this push:
new e08ec60 feat(splunk): support splunk hec logging plugin (#5819)
e08ec60 is described below
commit e08ec60883c5f1de733fc5c4ddd60e99b3e0f776
Author: 帅进超 <[email protected]>
AuthorDate: Mon Dec 20 17:51:11 2021 +0800
feat(splunk): support splunk hec logging plugin (#5819)
---
README.md | 2 +-
apisix/plugins/splunk-hec-logging.lua | 150 ++++++++++++++++
ci/pod/docker-compose.yml | 13 ++
conf/config-default.yaml | 1 +
docs/assets/images/plugin/splunk-hec-admin-cn.png | Bin 0 -> 462846 bytes
docs/assets/images/plugin/splunk-hec-admin-en.png | Bin 0 -> 444224 bytes
docs/en/latest/config.json | 3 +-
docs/en/latest/plugins/splunk-hec-logging.md | 143 ++++++++++++++++
docs/zh/latest/README.md | 2 +-
docs/zh/latest/config.json | 3 +-
docs/zh/latest/plugins/splunk-hec-logging.md | 143 ++++++++++++++++
t/admin/plugins.t | 1 +
t/plugin/splunk-hec-logging.t | 198 ++++++++++++++++++++++
13 files changed, 655 insertions(+), 4 deletions(-)
diff --git a/README.md b/README.md
index 3001650..23d2cb2 100644
--- a/README.md
+++ b/README.md
@@ -134,7 +134,7 @@ A/B testing, canary release, blue-green deployment, limit rate, defense against
- High performance: The single-core QPS reaches 18k with an average delay of fewer than 0.2 milliseconds.
- [Fault Injection](docs/en/latest/plugins/fault-injection.md)
- [REST Admin API](docs/en/latest/admin-api.md): Using the REST Admin API to control Apache APISIX, which only allows 127.0.0.1 access by default, you can modify the `allow_admin` field in `conf/config.yaml` to specify a list of IPs that are allowed to call the Admin API. Also, note that the Admin API uses key auth to verify the identity of the caller. **The `admin_key` field in `conf/config.yaml` needs to be modified before deployment to ensure security**.
- - External Loggers: Export access logs to external log management tools. ([HTTP Logger](docs/en/latest/plugins/http-logger.md), [TCP Logger](docs/en/latest/plugins/tcp-logger.md), [Kafka Logger](docs/en/latest/plugins/kafka-logger.md), [UDP Logger](docs/en/latest/plugins/udp-logger.md), [RocketMQ Logger](docs/en/latest/plugins/rocketmq-logger.md), [SkyWalking Logger](docs/en/latest/plugins/skywalking-logger.md), [Alibaba Cloud Logging(SLS)](docs/en/latest/plugins/sls-logger.md), [Googl [...]
+ - External Loggers: Export access logs to external log management tools. ([HTTP Logger](docs/en/latest/plugins/http-logger.md), [TCP Logger](docs/en/latest/plugins/tcp-logger.md), [Kafka Logger](docs/en/latest/plugins/kafka-logger.md), [UDP Logger](docs/en/latest/plugins/udp-logger.md), [RocketMQ Logger](docs/en/latest/plugins/rocketmq-logger.md), [SkyWalking Logger](docs/en/latest/plugins/skywalking-logger.md), [Alibaba Cloud Logging(SLS)](docs/en/latest/plugins/sls-logger.md), [Googl [...]
- [Datadog](docs/en/latest/plugins/datadog.md): push custom metrics to the DogStatsD server, comes bundled with [Datadog agent](https://docs.datadoghq.com/agent/), over the UDP protocol. DogStatsD basically is an implementation of StatsD protocol which collects the custom metrics for Apache APISIX agent, aggregates it into a single data point and sends it to the configured Datadog server.
- [Helm charts](https://github.com/apache/apisix-helm-chart)
diff --git a/apisix/plugins/splunk-hec-logging.lua b/apisix/plugins/splunk-hec-logging.lua
new file mode 100644
index 0000000..531e908
--- /dev/null
+++ b/apisix/plugins/splunk-hec-logging.lua
@@ -0,0 +1,150 @@
+--
+-- Licensed to the Apache Software Foundation (ASF) under one or more
+-- contributor license agreements. See the NOTICE file distributed with
+-- this work for additional information regarding copyright ownership.
+-- The ASF licenses this file to You under the Apache License, Version 2.0
+-- (the "License"); you may not use this file except in compliance with
+-- the License. You may obtain a copy of the License at
+--
+-- http://www.apache.org/licenses/LICENSE-2.0
+--
+-- Unless required by applicable law or agreed to in writing, software
+-- distributed under the License is distributed on an "AS IS" BASIS,
+-- WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+-- See the License for the specific language governing permissions and
+-- limitations under the License.
+--
+
+local core = require("apisix.core")
+local ngx = ngx
+local ngx_now = ngx.now
+local http = require("resty.http")
+local log_util = require("apisix.utils.log-util")
+local bp_manager_mod = require("apisix.utils.batch-processor-manager")
+
+
+local DEFAULT_SPLUNK_HEC_ENTRY_SOURCE = "apache-apisix-splunk-hec-logging"
+local DEFAULT_SPLUNK_HEC_ENTRY_TYPE = "_json"
+
+
+local plugin_name = "splunk-hec-logging"
+local batch_processor_manager = bp_manager_mod.new(plugin_name)
+
+
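+-- plugin configuration schema: endpoint.uri and endpoint.token are required;
+-- endpoint.timeout defaults to 10 seconds and ssl_verify defaults to true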
+local schema = {
+    type = "object",
+    properties = {
+        endpoint = {
+            type = "object",
+            properties = {
+                uri = core.schema.uri_def,
+                token = {
+                    type = "string",
+                },
+                channel = {
+                    type = "string",
+                },
+                timeout = {
+                    type = "integer",
+                    minimum = 1,
+                    default = 10
+                }
+            },
+            required = { "uri", "token" }
+        },
+        ssl_verify = {
+            type = "boolean",
+            default = true
+        },
+    },
+    required = { "endpoint" },
+}
+
+
+local _M = {
+    version = 0.1,
+    priority = 409,
+    name = plugin_name,
+    schema = batch_processor_manager:wrap_schema(schema),
+}
+
+
+function _M.check_schema(conf)
+    return core.schema.check(schema, conf)
+end
+
+
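+-- build a single Splunk HEC event from the context of the current request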
+local function get_logger_entry(conf)
+    local entry = log_util.get_full_log(ngx, conf)
+    return {
+        time = ngx_now(),
+        host = entry.server.hostname,
+        source = DEFAULT_SPLUNK_HEC_ENTRY_SOURCE,
+        sourcetype = DEFAULT_SPLUNK_HEC_ENTRY_TYPE,
+        event = {
+            request_url = entry.request.url,
+            request_method = entry.request.method,
+            request_headers = entry.request.headers,
+            request_query = entry.request.querystring,
+            request_size = entry.request.size,
+            response_headers = entry.response.headers,
+            response_status = entry.response.status,
+            response_size = entry.response.size,
+            latency = entry.latency,
+            upstream = entry.upstream,
+        }
+    }
+end
+
+
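+-- POST a batch of queued entries to the configured Splunk HEC collector endpoint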
+local function send_to_splunk(conf, entries)
+    local request_headers = {}
+    request_headers["Content-Type"] = "application/json"
+    request_headers["Authorization"] = "Splunk " .. conf.endpoint.token
+    if conf.endpoint.channel then
+        request_headers["X-Splunk-Request-Channel"] = conf.endpoint.channel
+    end
+
+    local http_new = http.new()
+    http_new:set_timeout(conf.endpoint.timeout * 1000)
+    local res, err = http_new:request_uri(conf.endpoint.uri, {
+        ssl_verify = conf.ssl_verify,
+        method = "POST",
+        body = core.json.encode(entries),
+        headers = request_headers,
+    })
+
+    if err then
+        return false, "failed to write log to splunk, " .. err
+    end
+
+    if res.status ~= 200 then
+        local body
+        body, err = core.json.decode(res.body)
+        if err then
+            return false, "failed to send splunk, http status code: " .. res.status
+        else
+            return false, "failed to send splunk, " .. body.text
+        end
+    end
+
+    return true
+end
+
+
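+-- log phase: queue the event, creating a new batch processor for this route if needed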
+function _M.log(conf, ctx)
+    local entry = get_logger_entry(conf)
+
+    if batch_processor_manager:add_entry(conf, entry) then
+        return
+    end
+
+    local process = function(entries)
+        return send_to_splunk(conf, entries)
+    end
+
+    batch_processor_manager:add_entry_to_new_processor(conf, entry, ctx, process)
+end
+
+
+return _M
diff --git a/ci/pod/docker-compose.yml b/ci/pod/docker-compose.yml
index b5d8062..16497f2 100644
--- a/ci/pod/docker-compose.yml
+++ b/ci/pod/docker-compose.yml
@@ -413,6 +413,19 @@ services:
networks:
opa_net:
+  # Splunk HEC Logging Service
+  splunk:
+    image: splunk/splunk:8.2.3
+    restart: unless-stopped
+    ports:
+      - "18088:8088"
+    environment:
+      SPLUNK_PASSWORD: "ApacheAPISIX@666"
+      SPLUNK_START_ARGS: "--accept-license"
+      SPLUNK_HEC_TOKEN: "BD274822-96AA-4DA6-90EC-18940FB2414C"
+      SPLUNK_HEC_SSL: "False"
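+    # the collector endpoint used by t/plugin/splunk-hec-logging.t is
+    # http://127.0.0.1:18088/services/collector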
+
+
networks:
apisix_net:
consul_net:
diff --git a/conf/config-default.yaml b/conf/config-default.yaml
index b53b32b..ca923ba 100644
--- a/conf/config-default.yaml
+++ b/conf/config-default.yaml
@@ -366,6 +366,7 @@ plugins: # plugin list (sorted by priority)
- datadog # priority: 495
- echo # priority: 412
- http-logger # priority: 410
+ - splunk-hec-logging # priority: 409
- skywalking-logger # priority: 408
- google-cloud-logging # priority: 407
- sls-logger # priority: 406
diff --git a/docs/assets/images/plugin/splunk-hec-admin-cn.png b/docs/assets/images/plugin/splunk-hec-admin-cn.png
new file mode 100644
index 0000000..e8997d1
Binary files /dev/null and b/docs/assets/images/plugin/splunk-hec-admin-cn.png differ
diff --git a/docs/assets/images/plugin/splunk-hec-admin-en.png b/docs/assets/images/plugin/splunk-hec-admin-en.png
new file mode 100644
index 0000000..b70678e
Binary files /dev/null and b/docs/assets/images/plugin/splunk-hec-admin-en.png differ
diff --git a/docs/en/latest/config.json b/docs/en/latest/config.json
index 72f027f..1094fd7 100644
--- a/docs/en/latest/config.json
+++ b/docs/en/latest/config.json
@@ -122,7 +122,8 @@
"plugins/log-rotate",
"plugins/error-log-logger",
"plugins/sls-logger",
- "plugins/google-cloud-logging"
+ "plugins/google-cloud-logging",
+ "plugins/splunk-hec-logging"
]
},
{
diff --git a/docs/en/latest/plugins/splunk-hec-logging.md b/docs/en/latest/plugins/splunk-hec-logging.md
new file mode 100644
index 0000000..2d63329
--- /dev/null
+++ b/docs/en/latest/plugins/splunk-hec-logging.md
@@ -0,0 +1,143 @@
+---
+title: splunk-hec-logging
+---
+
+<!--
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+-->
+
+## Summary
+
+- [**Name**](#name)
+- [**Attributes**](#attributes)
+- [**How To Enable**](#how-to-enable)
+- [**Test Plugin**](#test-plugin)
+- [**Disable Plugin**](#disable-plugin)
+
+## Name
+
+The `splunk-hec-logging` plugin is used to forward the request logs of `Apache APISIX` to the `Splunk HTTP Event Collector (HEC)` for analysis and storage. Once the plugin is enabled, `Apache APISIX` collects the request context information in the `Log Phase`, serializes it into the [Splunk Event Data format](https://docs.splunk.com/Documentation/Splunk/latest/Data/FormateventsforHTTPEventCollector#Event_metadata), and submits it to the batch queue. When the maximum processing capacity of each batch of t [...]
+
+For more information on the Batch-Processor in Apache APISIX, please refer to:
+[Batch-Processor](../batch-processor.md)
+
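+For illustration, the following is a minimal sketch of a single event as the plugin assembles it before queueing. The field names mirror `get_logger_entry` in `apisix/plugins/splunk-hec-logging.lua`; the literal values shown here are hypothetical:
+
+```lua
+-- one Splunk HEC event built per request (illustrative values only)
+local event = {
+    time = 1639993871.392,                     -- ngx.now(), epoch seconds
+    host = "localhost",                        -- server hostname
+    source = "apache-apisix-splunk-hec-logging",
+    sourcetype = "_json",
+    event = {
+        request_url = "http://127.0.0.1:9080/splunk.do?q=hello",
+        request_method = "GET",
+        request_headers = { host = "127.0.0.1:9080" },
+        request_query = { q = "hello" },
+        request_size = 59,
+        response_headers = { ["content-type"] = "text/plain" },
+        response_status = 200,
+        response_size = 118,
+        latency = 0.36,
+        upstream = "127.0.0.1:1980",
+    }
+}
+```
+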
+## Attributes
+
+| Name             | Requirement | Default | Description |
+| ---------------- | ----------- | ------- | ----------- |
+| endpoint         | required    |         | Splunk HEC endpoint configuration info |
+| endpoint.uri     | required    |         | Splunk HEC event collector API |
+| endpoint.token   | required    |         | Splunk HEC authentication token |
+| endpoint.channel | optional    |         | Splunk HEC data channel identifier, refer to [About HTTP Event Collector Indexer Acknowledgment](https://docs.splunk.com/Documentation/Splunk/8.2.3/Data/AboutHECIDXAck) |
+| endpoint.timeout | optional    | 10      | Splunk HEC data sending timeout, in seconds |
+| ssl_verify       | optional    | true    | enable `SSL` verification, see the [OpenResty docs](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake) |
+| max_retry_count  | optional    | 0       | max number of retries before the entry is removed from the processing pipeline |
+| retry_delay      | optional    | 1       | number of seconds to delay execution before retrying after a failure |
+| buffer_duration  | optional    | 60      | max age in seconds of the oldest entry in a batch before the batch must be processed |
+| inactive_timeout | optional    | 5       | max age in seconds before the buffer is flushed when inactive |
+| batch_max_size   | optional    | 1000    | max number of entries in each batch |
+
+## How To Enable
+
+The following is an example of how to enable the `splunk-hec-logging` plugin for a specific route.
+
+### Full configuration
+
+```shell
+curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "plugins":{
+        "splunk-hec-logging":{
+            "endpoint":{
+                "uri":"http://127.0.0.1:8088/services/collector",
+                "token":"BD274822-96AA-4DA6-90EC-18940FB2414C",
+                "channel":"FE0ECFAD-13D5-401B-847D-77833BD77131",
+                "timeout":60
+            },
+            "buffer_duration":60,
+            "max_retry_count":0,
+            "retry_delay":1,
+            "inactive_timeout":2,
+            "batch_max_size":10
+        }
+    },
+    "upstream":{
+        "type":"roundrobin",
+        "nodes":{
+            "127.0.0.1:1980":1
+        }
+    },
+    "uri":"/splunk.do"
+}'
+```
+
+### Minimal configuration
+
+```shell
+curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "plugins":{
+        "splunk-hec-logging":{
+            "endpoint":{
+                "uri":"http://127.0.0.1:8088/services/collector",
+                "token":"BD274822-96AA-4DA6-90EC-18940FB2414C"
+            }
+        }
+    },
+    "upstream":{
+        "type":"roundrobin",
+        "nodes":{
+            "127.0.0.1:1980":1
+        }
+    },
+    "uri":"/splunk.do"
+}'
+```
+
+## Test Plugin
+
+* Send a request to the route configured with the `splunk-hec-logging` plugin
+
+```shell
+$ curl -i http://127.0.0.1:9080/splunk.do?q=hello
+HTTP/1.1 200 OK
+...
+hello, world
+```
+
+* Log in to the Splunk dashboard to search for and view the logs
+
+
+
+## Disable Plugin
+
+To disable the `splunk-hec-logging` plugin, simply remove the corresponding `JSON` configuration of `splunk-hec-logging` from the route.
+
+```shell
+$ curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "uri": "/hello",
+    "plugins": {},
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {
+            "127.0.0.1:1980": 1
+        }
+    }
+}'
+```
diff --git a/docs/zh/latest/README.md b/docs/zh/latest/README.md
index b2e43c2..88d46fd 100644
--- a/docs/zh/latest/README.md
+++ b/docs/zh/latest/README.md
@@ -135,7 +135,7 @@ A/B testing, canary release (gray release), blue-green deployment, rate and traffic limiting, defense
- High performance: QPS reaches 18k on a single core, with a latency of only 0.2 milliseconds.
- [Fault Injection](plugins/fault-injection.md)
- [REST Admin API](admin-api.md): Use the REST Admin API to control Apache APISIX. By default only 127.0.0.1 can access it; you can modify the `allow_admin` field in `conf/config.yaml` to specify a list of IPs that are allowed to call the Admin API. Also note that the Admin API uses key auth to verify the caller's identity. **The `admin_key` field in `conf/config.yaml` needs to be modified before deployment to ensure security.**
- - External loggers: export access logs to external log management tools. ([HTTP Logger](plugins/http-logger.md), [TCP Logger](plugins/tcp-logger.md), [Kafka Logger](plugins/kafka-logger.md), [UDP Logger](plugins/udp-logger.md), [RocketMQ Logger](plugins/rocketmq-logger.md), [SkyWalking Logger](plugins/skywalking-logger.md), [Alibaba Cloud Logging(SLS)](plugins/sls-logger.md), [Google Cloud Logging](plugins/google-cloud-logging.md))
+ - External loggers: export access logs to external log management tools. ([HTTP Logger](plugins/http-logger.md), [TCP Logger](plugins/tcp-logger.md), [Kafka Logger](plugins/kafka-logger.md), [UDP Logger](plugins/udp-logger.md), [RocketMQ Logger](plugins/rocketmq-logger.md), [SkyWalking Logger](plugins/skywalking-logger.md), [Alibaba Cloud Logging(SLS)](plugins/sls-logger.md), [Google Cloud Logging](plugins/google-cloud-logging.md), [Splunk HEC Logging](plugins/splunk-hec-logging.md))
- [Helm charts](https://github.com/apache/apisix-helm-chart)
- **Highly scalable**
diff --git a/docs/zh/latest/config.json b/docs/zh/latest/config.json
index cd914b7..7ac810c 100644
--- a/docs/zh/latest/config.json
+++ b/docs/zh/latest/config.json
@@ -120,7 +120,8 @@
"plugins/log-rotate",
"plugins/error-log-logger",
"plugins/sls-logger",
- "plugins/google-cloud-logging"
+ "plugins/google-cloud-logging",
+ "plugins/splunk-hec-logging"
]
},
{
diff --git a/docs/zh/latest/plugins/splunk-hec-logging.md b/docs/zh/latest/plugins/splunk-hec-logging.md
new file mode 100644
index 0000000..719a581
--- /dev/null
+++ b/docs/zh/latest/plugins/splunk-hec-logging.md
@@ -0,0 +1,143 @@
+---
+title: splunk-hec-logging
+---
+
+<!--
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+-->
+
+## Summary
+
+- [**Name**](#name)
+- [**Attributes**](#attributes)
+- [**How To Enable**](#how-to-enable)
+- [**Test Plugin**](#test-plugin)
+- [**Disable Plugin**](#disable-plugin)
+
+## Name
+
+The `splunk-hec-logging` plugin is used to forward the request logs of `Apache APISIX` to the `Splunk HTTP Event Collector (HEC)` for analysis and storage. Once the plugin is enabled, `Apache APISIX` collects the request context information in the `Log Phase`, serializes it into the [Splunk Event Data format](https://docs.splunk.com/Documentation/Splunk/latest/Data/FormateventsforHTTPEventCollector#Event_metadata), and submits it to the batch queue. The queued data is sent to `Splunk HEC` when a batch reaches its maximum size or when the maximum flush interval of the buffer is reached.
+
+For more information on the Batch-Processor in Apache APISIX, please refer to:
+[Batch-Processor](../batch-processor.md)
+
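+As a reference, here is a minimal sketch of the HTTP request the plugin issues when a batch is flushed, mirroring `send_to_splunk` in `apisix/plugins/splunk-hec-logging.lua`; the endpoint URI, token, and channel below are placeholder values:
+
+```lua
+-- sketch of the batched delivery to the Splunk HEC collector (placeholder values)
+local http = require("resty.http")
+local cjson = require("cjson.safe")
+
+-- a batch of events as produced in the plugin's log phase
+local entries = {
+    { time = 1639993871, host = "localhost", source = "apache-apisix-splunk-hec-logging",
+      sourcetype = "_json", event = { response_status = 200 } },
+}
+
+local httpc = http.new()
+httpc:set_timeout(10 * 1000)  -- endpoint.timeout is in seconds, resty.http expects milliseconds
+local res, err = httpc:request_uri("http://127.0.0.1:8088/services/collector", {
+    method = "POST",
+    ssl_verify = true,
+    headers = {
+        ["Content-Type"] = "application/json",
+        ["Authorization"] = "Splunk BD274822-96AA-4DA6-90EC-18940FB2414C",
+        -- only sent when endpoint.channel is configured
+        ["X-Splunk-Request-Channel"] = "FE0ECFAD-13D5-401B-847D-77833BD77131",
+    },
+    body = cjson.encode(entries),
+})
+if not res then
+    ngx.log(ngx.ERR, "failed to send to splunk: ", err)
+end
+```
+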
+## Attributes
+
+| Name             | Requirement | Default | Description |
+| ---------------- | ----------- | ------- | ----------- |
+| endpoint         | required    |         | Splunk HEC endpoint configuration info |
+| endpoint.uri     | required    |         | Splunk HEC event collector API |
+| endpoint.token   | required    |         | Splunk HEC authentication token |
+| endpoint.channel | optional    |         | Splunk HEC data channel identifier, refer to [About HTTP Event Collector Indexer Acknowledgment](https://docs.splunk.com/Documentation/Splunk/8.2.3/Data/AboutHECIDXAck) |
+| endpoint.timeout | optional    | 10      | Splunk HEC data sending timeout, in seconds |
+| ssl_verify       | optional    | true    | enable `SSL` verification, see the [OpenResty docs](https://github.com/openresty/lua-nginx-module#tcpsocksslhandshake) |
+| max_retry_count  | optional    | 0       | max number of retries before the entry is removed from the processing pipeline |
+| retry_delay      | optional    | 1       | number of seconds to delay execution before retrying after a failure |
+| buffer_duration  | optional    | 60      | max age in seconds of the oldest entry in a batch before the batch must be processed |
+| inactive_timeout | optional    | 5       | max age in seconds before the buffer is flushed when inactive |
+| batch_max_size   | optional    | 1000    | max number of entries each batch queue can hold |
+
+## How To Enable
+
+The following example shows how to enable the `splunk-hec-logging` plugin on a specific route.
+
+### Full configuration
+
+```shell
+curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "plugins":{
+        "splunk-hec-logging":{
+            "endpoint":{
+                "uri":"http://127.0.0.1:8088/services/collector",
+                "token":"BD274822-96AA-4DA6-90EC-18940FB2414C",
+                "channel":"FE0ECFAD-13D5-401B-847D-77833BD77131",
+                "timeout":60
+            },
+            "buffer_duration":60,
+            "max_retry_count":0,
+            "retry_delay":1,
+            "inactive_timeout":2,
+            "batch_max_size":10
+        }
+    },
+    "upstream":{
+        "type":"roundrobin",
+        "nodes":{
+            "127.0.0.1:1980":1
+        }
+    },
+    "uri":"/splunk.do"
+}'
+```
+
+### Minimal configuration
+
+```shell
+curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "plugins":{
+        "splunk-hec-logging":{
+            "endpoint":{
+                "uri":"http://127.0.0.1:8088/services/collector",
+                "token":"BD274822-96AA-4DA6-90EC-18940FB2414C"
+            }
+        }
+    },
+    "upstream":{
+        "type":"roundrobin",
+        "nodes":{
+            "127.0.0.1:1980":1
+        }
+    },
+    "uri":"/splunk.do"
+}'
+```
+
+## Test Plugin
+
+* Send a request to the route configured with the `splunk-hec-logging` plugin
+
+```shell
+$ curl -i http://127.0.0.1:9080/splunk.do?q=hello
+HTTP/1.1 200 OK
+...
+hello, world
+```
+
+* Log in to the Splunk dashboard to search for and view the logs
+
+
+
+## Disable Plugin
+
+To disable the `splunk-hec-logging` plugin, simply remove the corresponding `JSON` configuration of `splunk-hec-logging` from the route.
+
+```shell
+$ curl http://127.0.0.1:9080/apisix/admin/routes/1 -H 'X-API-KEY: edd1c9f034335f136f87ad84b625c8f1' -X PUT -d '
+{
+    "uri": "/hello",
+    "plugins": {},
+    "upstream": {
+        "type": "roundrobin",
+        "nodes": {
+            "127.0.0.1:1980": 1
+        }
+    }
+}'
+```
diff --git a/t/admin/plugins.t b/t/admin/plugins.t
index c080634..2495b53 100644
--- a/t/admin/plugins.t
+++ b/t/admin/plugins.t
@@ -102,6 +102,7 @@ prometheus
datadog
echo
http-logger
+splunk-hec-logging
skywalking-logger
google-cloud-logging
sls-logger
diff --git a/t/plugin/splunk-hec-logging.t b/t/plugin/splunk-hec-logging.t
new file mode 100644
index 0000000..22d38ec
--- /dev/null
+++ b/t/plugin/splunk-hec-logging.t
@@ -0,0 +1,198 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements. See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+use t::APISIX 'no_plan';
+
+repeat_each(1);
+no_long_string();
+no_root_location();
+
+add_block_preprocessor(sub {
+ my ($block) = @_;
+
+ if ((!defined $block->error_log) && (!defined $block->no_error_log)) {
+ $block->set_value("no_error_log", "[error]");
+ }
+
+ if (!defined $block->request) {
+ $block->set_value("request", "GET /t");
+ }
+
+});
+
+run_tests();
+
+__DATA__
+
+=== TEST 1: configuration verification
+--- config
+ location /t {
+ content_by_lua_block {
+ local ok, err
+ local configs = {
+ -- full configuration
+ {
+ endpoint = {
+ uri = "http://127.0.0.1:18088/services/collector",
+ token = "BD274822-96AA-4DA6-90EC-18940FB2414C",
+ channel = "FE0ECFAD-13D5-401B-847D-77833BD77131",
+ timeout = 60
+ },
+ max_retry_count = 0,
+ retry_delay = 1,
+ buffer_duration = 60,
+ inactive_timeout = 2,
+ batch_max_size = 10,
+ },
+ -- minimize configuration
+ {
+ endpoint = {
+ uri = "http://127.0.0.1:18088/services/collector",
+ token = "BD274822-96AA-4DA6-90EC-18940FB2414C",
+ }
+ },
+ -- property "uri" is required
+ {
+ endpoint = {
+ token = "BD274822-96AA-4DA6-90EC-18940FB2414C",
+ }
+ },
+ -- property "token" is required
+ {
+ endpoint = {
+ uri = "http://127.0.0.1:18088/services/collector",
+ }
+ },
+ -- property "uri" validation failed
+ {
+ endpoint = {
+ uri = "127.0.0.1:18088/services/collector",
+ token = "BD274822-96AA-4DA6-90EC-18940FB2414C",
+ }
+ }
+ }
+
+ local plugin = require("apisix.plugins.splunk-hec-logging")
+ for i = 1, #configs do
+ ok, err = plugin.check_schema(configs[i])
+ if err then
+ ngx.say(err)
+ else
+ ngx.say("passed")
+ end
+ end
+ }
+ }
+--- response_body_like
+passed
+passed
+property "endpoint" validation failed: property "uri" is required
+property "endpoint" validation failed: property "token" is required
+property "endpoint" validation failed: property "uri" validation failed.*
+
+
+
+=== TEST 2: set route (failed auth)
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["splunk-hec-logging"] = {
+ endpoint = {
+ uri = "http://127.0.0.1:18088/services/collector",
+ token = "BD274822-96AA-4DA6-90EC-18940FB24444"
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 3: test route (failed auth)
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world
+--- error_log
+Batch Processor[splunk-hec-logging] failed to process entries: failed to send splunk, Invalid token
+Batch Processor[splunk-hec-logging] exceeded the max_retry_count
+
+
+
+=== TEST 4: set route (success write)
+--- config
+ location /t {
+ content_by_lua_block {
+ local t = require("lib.test_admin").test
+ local code, body = t('/apisix/admin/routes/1', ngx.HTTP_PUT, {
+ uri = "/hello",
+ upstream = {
+ type = "roundrobin",
+ nodes = {
+ ["127.0.0.1:1980"] = 1
+ }
+ },
+ plugins = {
+ ["splunk-hec-logging"] = {
+ endpoint = {
+ uri = "http://127.0.0.1:18088/services/collector",
+ token = "BD274822-96AA-4DA6-90EC-18940FB2414C"
+ },
+ batch_max_size = 1,
+ inactive_timeout = 1
+ }
+ }
+ })
+
+ if code >= 300 then
+ ngx.status = code
+ end
+ ngx.say(body)
+ }
+ }
+--- response_body
+passed
+
+
+
+=== TEST 5: test route (success write)
+--- request
+GET /hello
+--- wait: 2
+--- response_body
+hello world